Posted to commits@hive.apache.org by ha...@apache.org on 2013/10/31 17:00:41 UTC

svn commit: r1537540 [4/10] - in /hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientpositive/

Modified: hive/trunk/ql/src/test/results/clientpositive/decimal_udf.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/decimal_udf.q.out?rev=1537540&r1=1537539&r2=1537540&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/decimal_udf.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/decimal_udf.q.out Thu Oct 31 16:00:36 2013
@@ -29,32 +29,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL key))))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: (key + key)
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: (key + key)
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT key + key FROM DECIMAL_UDF
@@ -111,32 +100,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value))))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: (key + value)
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: (key + value)
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT key + value FROM DECIMAL_UDF
@@ -193,32 +171,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_TABLE_OR_COL key) (/ (TOK_TABLE_OR_COL value) 2))))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: (key + (value / 2))
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: (key + (value / 2))
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT key + (value/2) FROM DECIMAL_UDF
@@ -275,32 +242,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_TABLE_OR_COL key) '1.0')))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: (key + '1.0')
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: (key + '1.0')
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT key + '1.0' FROM DECIMAL_UDF
@@ -359,32 +315,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (- (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL key))))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: (key - key)
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: (key - key)
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT key - key FROM DECIMAL_UDF
@@ -441,32 +386,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (- (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value))))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: (key - value)
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: (key - value)
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT key - value FROM DECIMAL_UDF
@@ -523,32 +457,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (- (TOK_TABLE_OR_COL key) (/ (TOK_TABLE_OR_COL value) 2))))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: (key - (value / 2))
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: (key - (value / 2))
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT key - (value/2) FROM DECIMAL_UDF
@@ -605,32 +528,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (- (TOK_TABLE_OR_COL key) '1.0')))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: (key - '1.0')
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: (key - '1.0')
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT key - '1.0' FROM DECIMAL_UDF
@@ -689,32 +601,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (* (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL key))))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: (key * key)
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: (key * key)
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT key * key FROM DECIMAL_UDF
@@ -771,32 +672,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (* (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value))))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: (key * value)
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: (key * value)
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT key * value FROM DECIMAL_UDF
@@ -853,32 +743,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (* (TOK_TABLE_OR_COL key) (/ (TOK_TABLE_OR_COL value) 2))))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: (key * (value / 2))
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: (key * (value / 2))
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT key * (value/2) FROM DECIMAL_UDF
@@ -935,32 +814,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (* (TOK_TABLE_OR_COL key) '2.0')))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: (key * '2.0')
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: (key * '2.0')
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT key * '2.0' FROM DECIMAL_UDF
@@ -1019,33 +887,22 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (/ (TOK_TABLE_OR_COL key) 0))) (TOK_LIMIT 1)))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: (key / 0)
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: 1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: (key / 0)
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            Limit
+              ListSink
 
 
 PREHOOK: query: SELECT key / 0 FROM DECIMAL_UDF limit 1
@@ -1065,33 +922,22 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (/ (TOK_TABLE_OR_COL key) TOK_NULL))) (TOK_LIMIT 1)))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: (key / null)
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: 1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: (key / null)
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            Limit
+              ListSink
 
 
 PREHOOK: query: SELECT key / NULL FROM DECIMAL_UDF limit 1
@@ -1111,36 +957,25 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (/ (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL key)))) (TOK_WHERE (and (TOK_FUNCTION TOK_ISNOTNULL (TOK_TABLE_OR_COL key)) (<> (TOK_TABLE_OR_COL key) 0)))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Filter Operator
-              predicate:
-                  expr: (key is not null and (key <> 0))
-                  type: boolean
-              Select Operator
-                expressions:
-                      expr: (key / key)
-                      type: decimal(65,30)
-                outputColumnNames: _col0
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Filter Operator
+            predicate:
+                expr: (key is not null and (key <> 0))
+                type: boolean
+            Select Operator
+              expressions:
+                    expr: (key / key)
+                    type: decimal(65,30)
+              outputColumnNames: _col0
+              ListSink
 
 
 PREHOOK: query: SELECT key / key FROM DECIMAL_UDF WHERE key is not null and key <> 0
@@ -1193,36 +1028,25 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (/ (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value)))) (TOK_WHERE (and (TOK_FUNCTION TOK_ISNOTNULL (TOK_TABLE_OR_COL value)) (<> (TOK_TABLE_OR_COL value) 0)))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Filter Operator
-              predicate:
-                  expr: (value is not null and (value <> 0))
-                  type: boolean
-              Select Operator
-                expressions:
-                      expr: (key / value)
-                      type: decimal(65,30)
-                outputColumnNames: _col0
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Filter Operator
+            predicate:
+                expr: (value is not null and (value <> 0))
+                type: boolean
+            Select Operator
+              expressions:
+                    expr: (key / value)
+                    type: decimal(65,30)
+              outputColumnNames: _col0
+              ListSink
 
 
 PREHOOK: query: SELECT key / value FROM DECIMAL_UDF WHERE value is not null and value <> 0
@@ -1265,36 +1089,25 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (/ (TOK_TABLE_OR_COL key) (/ (TOK_TABLE_OR_COL value) 2)))) (TOK_WHERE (and (TOK_FUNCTION TOK_ISNOTNULL (TOK_TABLE_OR_COL value)) (<> (TOK_TABLE_OR_COL value) 0)))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Filter Operator
-              predicate:
-                  expr: (value is not null and (value <> 0))
-                  type: boolean
-              Select Operator
-                expressions:
-                      expr: (key / (value / 2))
-                      type: decimal(65,30)
-                outputColumnNames: _col0
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Filter Operator
+            predicate:
+                expr: (value is not null and (value <> 0))
+                type: boolean
+            Select Operator
+              expressions:
+                    expr: (key / (value / 2))
+                    type: decimal(65,30)
+              outputColumnNames: _col0
+              ListSink
 
 
 PREHOOK: query: SELECT key / (value/2) FROM DECIMAL_UDF  WHERE value is not null and value <> 0
@@ -1337,32 +1150,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (/ (TOK_TABLE_OR_COL key) '2.0')))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: (key / '2.0')
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: (key / '2.0')
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT key / '2.0' FROM DECIMAL_UDF
@@ -1421,32 +1223,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION abs (TOK_TABLE_OR_COL key))))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: abs(key)
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: abs(key)
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT abs(key) FROM DECIMAL_UDF
@@ -1647,32 +1438,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (- (TOK_TABLE_OR_COL key))))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: (- key)
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: (- key)
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT -key FROM DECIMAL_UDF
@@ -1731,32 +1511,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_TABLE_OR_COL key))))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: key
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: key
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT +key FROM DECIMAL_UDF
@@ -1815,32 +1584,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION CEIL (TOK_TABLE_OR_COL key))))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: ceil(key)
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: ceil(key)
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT CEIL(key) FROM DECIMAL_UDF
@@ -1899,32 +1657,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION FLOOR (TOK_TABLE_OR_COL key))))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: floor(key)
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: floor(key)
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT FLOOR(key) FROM DECIMAL_UDF
@@ -1983,32 +1730,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION ROUND (TOK_TABLE_OR_COL key) 2)))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: round(key, 2)
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: round(key, 2)
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT ROUND(key, 2) FROM DECIMAL_UDF
@@ -2067,32 +1803,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION POWER (TOK_TABLE_OR_COL key) 2)))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: power(key, 2)
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: power(key, 2)
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT POWER(key, 2) FROM DECIMAL_UDF
@@ -2151,32 +1876,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME DECIMAL_UDF))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (% (+ (TOK_TABLE_OR_COL key) 1) (/ (TOK_TABLE_OR_COL key) 2))))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        decimal_udf 
-          TableScan
-            alias: decimal_udf
-            Select Operator
-              expressions:
-                    expr: ((key + 1) % (key / 2))
-                    type: decimal(65,30)
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: decimal_udf
+          Select Operator
+            expressions:
+                  expr: ((key + 1) % (key / 2))
+                  type: decimal(65,30)
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: SELECT (key + 1) % (key / 2) FROM DECIMAL_UDF

Modified: hive/trunk/ql/src/test/results/clientpositive/disallow_incompatible_type_change_off.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/disallow_incompatible_type_change_off.q.out?rev=1537540&r1=1537539&r2=1537540&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/disallow_incompatible_type_change_off.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/disallow_incompatible_type_change_off.q.out Thu Oct 31 16:00:36 2013
@@ -12,11 +12,11 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: CREATE TABLE test_table123 (a INT, b MAP<STRING, STRING>) PARTITIONED BY (ds STRING) STORED AS SEQUENCEFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@test_table123
-PREHOOK: query: INSERT OVERWRITE TABLE test_table123 PARTITION(ds="foo1") SELECT 1, MAP("a1", "b1") FROM src LIMIT 1
+PREHOOK: query: INSERT OVERWRITE TABLE test_table123 PARTITION(ds="foo1") SELECT 1, MAP("a1", "b1") FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@test_table123@ds=foo1
-POSTHOOK: query: INSERT OVERWRITE TABLE test_table123 PARTITION(ds="foo1") SELECT 1, MAP("a1", "b1") FROM src LIMIT 1
+POSTHOOK: query: INSERT OVERWRITE TABLE test_table123 PARTITION(ds="foo1") SELECT 1, MAP("a1", "b1") FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@test_table123@ds=foo1

Modified: hive/trunk/ql/src/test/results/clientpositive/filter_join_breaktask2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/filter_join_breaktask2.q.out?rev=1537540&r1=1537539&r2=1537540&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/filter_join_breaktask2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/filter_join_breaktask2.q.out Thu Oct 31 16:00:36 2013
@@ -20,11 +20,11 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table T4 (c0 bigint, c1 string, c2 string, c3 string, c4 string, c5 string, c6 string, c7 string, c8 string, c9 string, c10 string, c11 string, c12 string, c13 string, c14 string, c15 string, c16 string, c17 string, c18 string, c19 string, c20 string, c21 string, c22 string, c23 string, c24 string, c25 string, c26 string, c27 string, c28 string, c29 string, c30 string, c31 string, c32 string, c33 string, c34 string, c35 string, c36 string, c37 string, c38 string, c39 string, c40 string, c41 string, c42 string, c43 string, c44 string, c45 string, c46 string, c47 string, c48 string, c49 string, c50 string, c51 string, c52 string, c53 string, c54 string, c55 string, c56 string, c57 string, c58 string, c59 string, c60 string, c61 string, c62 string, c63 string, c64 string, c65 string, c66 string, c67 bigint, c68 string, c69 string, c70 bigint, c71 bigint, c72 bigint, c73 string, c74 string, c75 string, c76 string, c77 string, c78 string, c79 string, c80 string, c81 bigint, c82 bigint, c83 bigint) partitioned by (ds string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@T4
-PREHOOK: query: insert overwrite table T1 partition (ds='2010-04-17') select '5', '1', '1', '1',  0, 0,4 from src limit 1
+PREHOOK: query: insert overwrite table T1 partition (ds='2010-04-17') select '5', '1', '1', '1',  0, 0,4 from src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@t1@ds=2010-04-17
-POSTHOOK: query: insert overwrite table T1 partition (ds='2010-04-17') select '5', '1', '1', '1',  0, 0,4 from src limit 1
+POSTHOOK: query: insert overwrite table T1 partition (ds='2010-04-17') select '5', '1', '1', '1',  0, 0,4 from src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@t1@ds=2010-04-17
@@ -35,11 +35,11 @@ POSTHOOK: Lineage: t1 PARTITION(ds=2010-
 POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c5 SIMPLE []
 POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c6 SIMPLE []
 POSTHOOK: Lineage: t1 PARTITION(ds=2010-04-17).c7 SIMPLE []
-PREHOOK: query: insert overwrite table T2 partition(ds='2010-04-17') select '5','name', NULL, '2', 'kavin',NULL, '9', 'c', '8', '0', '0', '7', '1','2', '0', '3','2', NULL, '1', NULL, '3','2','0','0','5','10' from src limit 1
+PREHOOK: query: insert overwrite table T2 partition(ds='2010-04-17') select '5','name', NULL, '2', 'kavin',NULL, '9', 'c', '8', '0', '0', '7', '1','2', '0', '3','2', NULL, '1', NULL, '3','2','0','0','5','10' from src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@t2@ds=2010-04-17
-POSTHOOK: query: insert overwrite table T2 partition(ds='2010-04-17') select '5','name', NULL, '2', 'kavin',NULL, '9', 'c', '8', '0', '0', '7', '1','2', '0', '3','2', NULL, '1', NULL, '3','2','0','0','5','10' from src limit 1
+POSTHOOK: query: insert overwrite table T2 partition(ds='2010-04-17') select '5','name', NULL, '2', 'kavin',NULL, '9', 'c', '8', '0', '0', '7', '1','2', '0', '3','2', NULL, '1', NULL, '3','2','0','0','5','10' from src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@t2@ds=2010-04-17
@@ -76,11 +76,11 @@ POSTHOOK: Lineage: t2 PARTITION(ds=2010-
 POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c7 SIMPLE []
 POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c8 SIMPLE []
 POSTHOOK: Lineage: t2 PARTITION(ds=2010-04-17).c9 SIMPLE []
-PREHOOK: query: insert overwrite table T3 partition (ds='2010-04-17') select 4,5,0 from src limit 1
+PREHOOK: query: insert overwrite table T3 partition (ds='2010-04-17') select 4,5,0 from src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@t3@ds=2010-04-17
-POSTHOOK: query: insert overwrite table T3 partition (ds='2010-04-17') select 4,5,0 from src limit 1
+POSTHOOK: query: insert overwrite table T3 partition (ds='2010-04-17') select 4,5,0 from src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@t3@ds=2010-04-17

Modified: hive/trunk/ql/src/test/results/clientpositive/literal_decimal.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/literal_decimal.q.out?rev=1537540&r1=1537539&r2=1537540&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/literal_decimal.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/literal_decimal.q.out Thu Oct 31 16:00:36 2013
@@ -6,49 +6,38 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (- 1BD)) (TOK_SELEXPR 0BD) (TOK_SELEXPR 1BD) (TOK_SELEXPR 3.14BD) (TOK_SELEXPR (- 3.14BD)) (TOK_SELEXPR 99999999999999999BD) (TOK_SELEXPR 99999999999999999.9999999999999BD) (TOK_SELEXPR 1E-99BD) (TOK_SELEXPR 1E99BD)) (TOK_LIMIT 1)))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        src 
-          TableScan
-            alias: src
-            Select Operator
-              expressions:
-                    expr: (- 1)
-                    type: decimal(65,30)
-                    expr: 0
-                    type: decimal(65,30)
-                    expr: 1
-                    type: decimal(65,30)
-                    expr: 3.14
-                    type: decimal(65,30)
-                    expr: (- 3.14)
-                    type: decimal(65,30)
-                    expr: 99999999999999999
-                    type: decimal(65,30)
-                    expr: 99999999999999999.9999999999999
-                    type: decimal(65,30)
-                    expr: 1E-99
-                    type: decimal(65,30)
-                    expr: 1E99
-                    type: decimal(65,30)
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: 1
+      Processor Tree:
+        TableScan
+          alias: src
+          Select Operator
+            expressions:
+                  expr: (- 1)
+                  type: decimal(65,30)
+                  expr: 0
+                  type: decimal(65,30)
+                  expr: 1
+                  type: decimal(65,30)
+                  expr: 3.14
+                  type: decimal(65,30)
+                  expr: (- 3.14)
+                  type: decimal(65,30)
+                  expr: 99999999999999999
+                  type: decimal(65,30)
+                  expr: 99999999999999999.9999999999999
+                  type: decimal(65,30)
+                  expr: 1E-99
+                  type: decimal(65,30)
+                  expr: 1E99
+                  type: decimal(65,30)
+            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
+            Limit
+              ListSink
 
 
 PREHOOK: query: SELECT -1BD, 0BD, 1BD, 3.14BD, -3.14BD, 99999999999999999BD, 99999999999999999.9999999999999BD, 1E-99BD, 1E99BD FROM src LIMIT 1

Modified: hive/trunk/ql/src/test/results/clientpositive/literal_double.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/literal_double.q.out?rev=1537540&r1=1537539&r2=1537540&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/literal_double.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/literal_double.q.out Thu Oct 31 16:00:36 2013
@@ -6,49 +6,38 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 3.14) (TOK_SELEXPR (- 3.14)) (TOK_SELEXPR 3.14e8) (TOK_SELEXPR 3.14e-8) (TOK_SELEXPR (- 3.14e8)) (TOK_SELEXPR (- 3.14e-8)) (TOK_SELEXPR 3.14e+8) (TOK_SELEXPR 3.14E8) (TOK_SELEXPR 3.14E-8)) (TOK_LIMIT 1)))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        src 
-          TableScan
-            alias: src
-            Select Operator
-              expressions:
-                    expr: 3.14
-                    type: double
-                    expr: (- 3.14)
-                    type: double
-                    expr: 3.14E8
-                    type: double
-                    expr: 3.14E-8
-                    type: double
-                    expr: (- 3.14E8)
-                    type: double
-                    expr: (- 3.14E-8)
-                    type: double
-                    expr: 3.14E8
-                    type: double
-                    expr: 3.14E8
-                    type: double
-                    expr: 3.14E-8
-                    type: double
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: 1
+      Processor Tree:
+        TableScan
+          alias: src
+          Select Operator
+            expressions:
+                  expr: 3.14
+                  type: double
+                  expr: (- 3.14)
+                  type: double
+                  expr: 3.14E8
+                  type: double
+                  expr: 3.14E-8
+                  type: double
+                  expr: (- 3.14E8)
+                  type: double
+                  expr: (- 3.14E-8)
+                  type: double
+                  expr: 3.14E8
+                  type: double
+                  expr: 3.14E8
+                  type: double
+                  expr: 3.14E-8
+                  type: double
+            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
+            Limit
+              ListSink
 
 
 PREHOOK: query: SELECT 3.14, -3.14, 3.14e8, 3.14e-8, -3.14e8, -3.14e-8, 3.14e+8, 3.14E8, 3.14E-8 FROM src LIMIT 1

Modified: hive/trunk/ql/src/test/results/clientpositive/literal_ints.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/literal_ints.q.out?rev=1537540&r1=1537539&r2=1537540&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/literal_ints.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/literal_ints.q.out Thu Oct 31 16:00:36 2013
@@ -6,39 +6,28 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 100) (TOK_SELEXPR 100Y) (TOK_SELEXPR 100S) (TOK_SELEXPR 100L)) (TOK_LIMIT 1)))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        src 
-          TableScan
-            alias: src
-            Select Operator
-              expressions:
-                    expr: 100
-                    type: int
-                    expr: 100
-                    type: tinyint
-                    expr: 100
-                    type: smallint
-                    expr: 100
-                    type: bigint
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: 1
+      Processor Tree:
+        TableScan
+          alias: src
+          Select Operator
+            expressions:
+                  expr: 100
+                  type: int
+                  expr: 100
+                  type: tinyint
+                  expr: 100
+                  type: smallint
+                  expr: 100
+                  type: bigint
+            outputColumnNames: _col0, _col1, _col2, _col3
+            Limit
+              ListSink
 
 
 PREHOOK: query: SELECT 100, 100Y, 100S, 100L FROM src LIMIT 1

Modified: hive/trunk/ql/src/test/results/clientpositive/literal_string.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/literal_string.q.out?rev=1537540&r1=1537539&r2=1537540&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/literal_string.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/literal_string.q.out Thu Oct 31 16:00:36 2013
@@ -16,51 +16,40 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'face''book') (TOK_SELEXPR (TOK_STRINGLITERALSEQUENCE 'face' 'book')) (TOK_SELEXPR (TOK_STRINGLITERALSEQUENCE 'face' 'book')) (TOK_SELEXPR "face""book") (TOK_SELEXPR (TOK_STRINGLITERALSEQUENCE "face" "book")) (TOK_SELEXPR (TOK_STRINGLITERALSEQUENCE "face" "book")) (TOK_SELEXPR (TOK_STRINGLITERALSEQUENCE 'face' 'bo' 'ok')) (TOK_SELEXPR 'face'"book") (TOK_SELEXPR "face"'book') (TOK_SELEXPR 'facebook')) (TOK_LIMIT 1)))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        src 
-          TableScan
-            alias: src
-            Select Operator
-              expressions:
-                    expr: 'facebook'
-                    type: string
-                    expr: 'facebook'
-                    type: string
-                    expr: 'facebook'
-                    type: string
-                    expr: 'facebook'
-                    type: string
-                    expr: 'facebook'
-                    type: string
-                    expr: 'facebook'
-                    type: string
-                    expr: 'facebook'
-                    type: string
-                    expr: 'facebook'
-                    type: string
-                    expr: 'facebook'
-                    type: string
-                    expr: 'facebook'
-                    type: string
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: 1
+      Processor Tree:
+        TableScan
+          alias: src
+          Select Operator
+            expressions:
+                  expr: 'facebook'
+                  type: string
+                  expr: 'facebook'
+                  type: string
+                  expr: 'facebook'
+                  type: string
+                  expr: 'facebook'
+                  type: string
+                  expr: 'facebook'
+                  type: string
+                  expr: 'facebook'
+                  type: string
+                  expr: 'facebook'
+                  type: string
+                  expr: 'facebook'
+                  type: string
+                  expr: 'facebook'
+                  type: string
+                  expr: 'facebook'
+                  type: string
+            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
+            Limit
+              ListSink
 
 
 PREHOOK: query: SELECT 'face''book', 'face' 'book', 'face'

Modified: hive/trunk/ql/src/test/results/clientpositive/macro.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/macro.q.out?rev=1537540&r1=1537539&r2=1537540&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/macro.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/macro.q.out Thu Oct 31 16:00:36 2013
@@ -19,33 +19,22 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION SIGMOID 2))) (TOK_LIMIT 1)))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        src 
-          TableScan
-            alias: src
-            Select Operator
-              expressions:
-                    expr: SIGMOID(2)
-                    type: double
-              outputColumnNames: _col0
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: 1
+      Processor Tree:
+        TableScan
+          alias: src
+          Select Operator
+            expressions:
+                  expr: SIGMOID(2)
+                  type: double
+            outputColumnNames: _col0
+            Limit
+              ListSink
 
 
 PREHOOK: query: EXPLAIN EXTENDED SELECT SIGMOID(2) FROM src LIMIT 1
@@ -56,94 +45,23 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION SIGMOID 2))) (TOK_LIMIT 1)))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        src 
-          TableScan
-            alias: src
-            GatherStats: false
-            Select Operator
-              expressions:
-                    expr: SIGMOID(2)
-                    type: double
-              outputColumnNames: _col0
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        columns _col0
-                        columns.types double
-                        escape.delim \
-                        hive.serialization.extend.nesting.levels true
-                        serialization.format 1
-                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  TotalFiles: 1
-                  GatherStats: false
-                  MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              bucket_count -1
-              columns key,value
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numPartitions 0
-              numRows 0
-              rawDataSize 0
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numPartitions 0
-                numRows 0
-                rawDataSize 0
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-      Truncated Path -> Alias:
-        /src [src]
-
   Stage: Stage-0
     Fetch Operator
       limit: 1
+      Processor Tree:
+        TableScan
+          alias: src
+          GatherStats: false
+          Select Operator
+            expressions:
+                  expr: SIGMOID(2)
+                  type: double
+            outputColumnNames: _col0
+            Limit
+              ListSink
 
 
 PREHOOK: query: DROP TEMPORARY MACRO SIGMOID
@@ -171,33 +89,22 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_FUNCTION FIXED_NUMBER) 1))) (TOK_LIMIT 1)))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        src 
-          TableScan
-            alias: src
-            Select Operator
-              expressions:
-                    expr: (FIXED_NUMBER() + 1)
-                    type: int
-              outputColumnNames: _col0
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: 1
+      Processor Tree:
+        TableScan
+          alias: src
+          Select Operator
+            expressions:
+                  expr: (FIXED_NUMBER() + 1)
+                  type: int
+            outputColumnNames: _col0
+            Limit
+              ListSink
 
 
 PREHOOK: query: EXPLAIN EXTENDED SELECT FIXED_NUMBER() + 1 FROM src LIMIT 1
@@ -208,94 +115,23 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_FUNCTION FIXED_NUMBER) 1))) (TOK_LIMIT 1)))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        src 
-          TableScan
-            alias: src
-            GatherStats: false
-            Select Operator
-              expressions:
-                    expr: (FIXED_NUMBER() + 1)
-                    type: int
-              outputColumnNames: _col0
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        columns _col0
-                        columns.types int
-                        escape.delim \
-                        hive.serialization.extend.nesting.levels true
-                        serialization.format 1
-                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  TotalFiles: 1
-                  GatherStats: false
-                  MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              bucket_count -1
-              columns key,value
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numPartitions 0
-              numRows 0
-              rawDataSize 0
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numPartitions 0
-                numRows 0
-                rawDataSize 0
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-      Truncated Path -> Alias:
-        /src [src]
-
   Stage: Stage-0
     Fetch Operator
       limit: 1
+      Processor Tree:
+        TableScan
+          alias: src
+          GatherStats: false
+          Select Operator
+            expressions:
+                  expr: (FIXED_NUMBER() + 1)
+                  type: int
+            outputColumnNames: _col0
+            Limit
+              ListSink
 
 
 PREHOOK: query: DROP TEMPORARY MACRO FIXED_NUMBER
@@ -344,33 +180,22 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION SIMPLE_ADD 1 9))) (TOK_LIMIT 1)))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        src 
-          TableScan
-            alias: src
-            Select Operator
-              expressions:
-                    expr: SIMPLE_ADD(1, 9)
-                    type: int
-              outputColumnNames: _col0
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: 1
+      Processor Tree:
+        TableScan
+          alias: src
+          Select Operator
+            expressions:
+                  expr: SIMPLE_ADD(1, 9)
+                  type: int
+            outputColumnNames: _col0
+            Limit
+              ListSink
 
 
 PREHOOK: query: EXPLAIN EXTENDED SELECT SIMPLE_ADD(1, 9) FROM src LIMIT 1
@@ -381,94 +206,23 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION SIMPLE_ADD 1 9))) (TOK_LIMIT 1)))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        src 
-          TableScan
-            alias: src
-            GatherStats: false
-            Select Operator
-              expressions:
-                    expr: SIMPLE_ADD(1, 9)
-                    type: int
-              outputColumnNames: _col0
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        columns _col0
-                        columns.types int
-                        escape.delim \
-                        hive.serialization.extend.nesting.levels true
-                        serialization.format 1
-                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  TotalFiles: 1
-                  GatherStats: false
-                  MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              bucket_count -1
-              columns key,value
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numPartitions 0
-              numRows 0
-              rawDataSize 0
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numPartitions 0
-                numRows 0
-                rawDataSize 0
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-      Truncated Path -> Alias:
-        /src [src]
-
   Stage: Stage-0
     Fetch Operator
       limit: 1
+      Processor Tree:
+        TableScan
+          alias: src
+          GatherStats: false
+          Select Operator
+            expressions:
+                  expr: SIMPLE_ADD(1, 9)
+                  type: int
+            outputColumnNames: _col0
+            Limit
+              ListSink
 
 
 PREHOOK: query: DROP TEMPORARY MACRO SIMPLE_ADD

Modified: hive/trunk/ql/src/test/results/clientpositive/null_cast.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/null_cast.q.out?rev=1537540&r1=1537539&r2=1537540&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/null_cast.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/null_cast.q.out Thu Oct 31 16:00:36 2013
@@ -2,16 +2,16 @@ PREHOOK: query: EXPLAIN SELECT ARRAY(NUL
                ARRAY(NULL, ARRAY()),
                ARRAY(NULL, MAP()),
                ARRAY(NULL, STRUCT(0))
-        FROM src LIMIT 1
+        FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN SELECT ARRAY(NULL, 0), 
                ARRAY(NULL, ARRAY()),
                ARRAY(NULL, MAP()),
                ARRAY(NULL, STRUCT(0))
-        FROM src LIMIT 1
+        FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION ARRAY TOK_NULL 0)) (TOK_SELEXPR (TOK_FUNCTION ARRAY TOK_NULL (TOK_FUNCTION ARRAY))) (TOK_SELEXPR (TOK_FUNCTION ARRAY TOK_NULL (TOK_FUNCTION MAP))) (TOK_SELEXPR (TOK_FUNCTION ARRAY TOK_NULL (TOK_FUNCTION STRUCT 0)))) (TOK_LIMIT 1)))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION ARRAY TOK_NULL 0)) (TOK_SELEXPR (TOK_FUNCTION ARRAY TOK_NULL (TOK_FUNCTION ARRAY))) (TOK_SELEXPR (TOK_FUNCTION ARRAY TOK_NULL (TOK_FUNCTION MAP))) (TOK_SELEXPR (TOK_FUNCTION ARRAY TOK_NULL (TOK_FUNCTION STRUCT 0))))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -24,6 +24,7 @@ STAGE PLANS:
         src 
           TableScan
             alias: src
+            Row Limit Per Split: 1
             Select Operator
               expressions:
                     expr: array(null,0)
@@ -35,25 +36,24 @@ STAGE PLANS:
                     expr: array(null,struct(0))
                     type: array<struct<col1:int>>
               outputColumnNames: _col0, _col1, _col2, _col3
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
     Fetch Operator
-      limit: 1
+      limit: -1
 
 
 PREHOOK: query: SELECT ARRAY(NULL, 0), 
        ARRAY(NULL, ARRAY()),
        ARRAY(NULL, MAP()),
        ARRAY(NULL, STRUCT(0))
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -61,7 +61,7 @@ POSTHOOK: query: SELECT ARRAY(NULL, 0), 
        ARRAY(NULL, ARRAY()),
        ARRAY(NULL, MAP()),
        ARRAY(NULL, STRUCT(0))
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####

Modified: hive/trunk/ql/src/test/results/clientpositive/num_op_type_conv.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/num_op_type_conv.q.out?rev=1537540&r1=1537539&r2=1537540&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/num_op_type_conv.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/num_op_type_conv.q.out Thu Oct 31 16:00:36 2013
@@ -12,43 +12,32 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ TOK_NULL 7)) (TOK_SELEXPR (- 1.0 TOK_NULL)) (TOK_SELEXPR (+ TOK_NULL TOK_NULL)) (TOK_SELEXPR (% (TOK_FUNCTION TOK_BIGINT 21) (TOK_FUNCTION TOK_TINYINT 5))) (TOK_SELEXPR (% (TOK_FUNCTION TOK_BIGINT 21) (TOK_FUNCTION TOK_BIGINT 21))) (TOK_SELEXPR (% 9 "3"))) (TOK_LIMIT 1)))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        src 
-          TableScan
-            alias: src
-            Select Operator
-              expressions:
-                    expr: (null + 7)
-                    type: int
-                    expr: (1.0 - null)
-                    type: double
-                    expr: (null + null)
-                    type: tinyint
-                    expr: (UDFToLong(21) % UDFToByte(5))
-                    type: bigint
-                    expr: (UDFToLong(21) % UDFToLong(21))
-                    type: bigint
-                    expr: (9 % '3')
-                    type: double
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: 1
+      Processor Tree:
+        TableScan
+          alias: src
+          Select Operator
+            expressions:
+                  expr: (null + 7)
+                  type: int
+                  expr: (1.0 - null)
+                  type: double
+                  expr: (null + null)
+                  type: tinyint
+                  expr: (UDFToLong(21) % UDFToByte(5))
+                  type: bigint
+                  expr: (UDFToLong(21) % UDFToLong(21))
+                  type: bigint
+                  expr: (9 % '3')
+                  type: double
+            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+            Limit
+              ListSink
 
 
 PREHOOK: query: SELECT null + 7, 1.0 - null, null + null,

Modified: hive/trunk/ql/src/test/results/clientpositive/orc_diff_part_cols.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/orc_diff_part_cols.q.out?rev=1537540&r1=1537539&r2=1537540&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/orc_diff_part_cols.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/orc_diff_part_cols.q.out Thu Oct 31 16:00:36 2013
@@ -15,7 +15,7 @@ PREHOOK: query: -- Create a table with o
 -- to another partition
 -- This can produce unexpected results with CombineHiveInputFormat
 
-INSERT OVERWRITE TABLE test_orc PARTITION (part = '1') SELECT key FROM src LIMIT 5
+INSERT OVERWRITE TABLE test_orc PARTITION (part = '1') SELECT key FROM src tablesample (5 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@test_orc@part=1
@@ -23,7 +23,7 @@ POSTHOOK: query: -- Create a table with 
 -- to another partition
 -- This can produce unexpected results with CombineHiveInputFormat
 
-INSERT OVERWRITE TABLE test_orc PARTITION (part = '1') SELECT key FROM src LIMIT 5
+INSERT OVERWRITE TABLE test_orc PARTITION (part = '1') SELECT key FROM src tablesample (5 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@test_orc@part=1

Modified: hive/trunk/ql/src/test/results/clientpositive/orc_empty_strings.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/orc_empty_strings.q.out?rev=1537540&r1=1537539&r2=1537540&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/orc_empty_strings.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/orc_empty_strings.q.out Thu Oct 31 16:00:36 2013
@@ -9,11 +9,11 @@ STORED AS INPUTFORMAT 'org.apache.hadoop
 OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@test_orc
-PREHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT '' FROM src limit 10
+PREHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT '' FROM src tablesample (10 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@test_orc
-POSTHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT '' FROM src limit 10
+POSTHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT '' FROM src tablesample (10 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@test_orc
@@ -41,11 +41,11 @@ POSTHOOK: Lineage: test_orc.key SIMPLE [
 
 
 
-PREHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT IF (key % 3 = 0, key, '') FROM src limit 10
+PREHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT IF (key % 3 = 0, key, '') FROM src tablesample (10 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@test_orc
-POSTHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT IF (key % 3 = 0, key, '') FROM src limit 10
+POSTHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT IF (key % 3 = 0, key, '') FROM src tablesample (10 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@test_orc

Modified: hive/trunk/ql/src/test/results/clientpositive/partcols1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/partcols1.q.out?rev=1537540&r1=1537539&r2=1537540&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/partcols1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/partcols1.q.out Thu Oct 31 16:00:36 2013
@@ -4,12 +4,12 @@ POSTHOOK: query: create table test1(col1
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@test1
 PREHOOK: query: insert overwrite table test1 partition (partitionId=1)
-  select key from src limit 10
+  select key from src tablesample (10 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@test1@partitionid=1
 POSTHOOK: query: insert overwrite table test1 partition (partitionId=1)
-  select key from src limit 10
+  select key from src tablesample (10 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@test1@partitionid=1

Modified: hive/trunk/ql/src/test/results/clientpositive/partition_date.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/partition_date.q.out?rev=1537540&r1=1537539&r2=1537540&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/partition_date.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/partition_date.q.out Thu Oct 31 16:00:36 2013
@@ -7,25 +7,25 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table partition_date_1 (key string, value string) partitioned by (dt date, region int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@partition_date_1
-PREHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=1) 
-  select * from src limit 10
+PREHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=1)
+  select * from src tablesample (10 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@partition_date_1@dt=2000-01-01/region=1
-POSTHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=1) 
-  select * from src limit 10
+POSTHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=1)
+  select * from src tablesample (10 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@partition_date_1@dt=2000-01-01/region=1
 POSTHOOK: Lineage: partition_date_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=2) 
-  select * from src limit 5
+PREHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=2)
+  select * from src tablesample (5 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@partition_date_1@dt=2000-01-01/region=2
-POSTHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=2) 
-  select * from src limit 5
+POSTHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=2)
+  select * from src tablesample (5 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@partition_date_1@dt=2000-01-01/region=2
@@ -34,12 +34,12 @@ POSTHOOK: Lineage: partition_date_1 PART
 POSTHOOK: Lineage: partition_date_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: insert overwrite table partition_date_1 partition(dt='2013-08-08', region=1) 
-  select * from src limit 20
+  select * from src tablesample (20 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@partition_date_1@dt=2013-08-08/region=1
 POSTHOOK: query: insert overwrite table partition_date_1 partition(dt='2013-08-08', region=1) 
-  select * from src limit 20
+  select * from src tablesample (20 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@partition_date_1@dt=2013-08-08/region=1
@@ -50,12 +50,12 @@ POSTHOOK: Lineage: partition_date_1 PART
 POSTHOOK: Lineage: partition_date_1 PARTITION(dt=2013-08-08,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date_1 PARTITION(dt=2013-08-08,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: insert overwrite table partition_date_1 partition(dt='2013-08-08', region=10) 
-  select * from src limit 11
+  select * from src tablesample (11 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@partition_date_1@dt=2013-08-08/region=10
 POSTHOOK: query: insert overwrite table partition_date_1 partition(dt='2013-08-08', region=10) 
-  select * from src limit 11
+  select * from src tablesample (11 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@partition_date_1@dt=2013-08-08/region=10