Posted to commits@hive.apache.org by mg...@apache.org on 2020/05/18 11:13:49 UTC

[hive] branch master updated: HIVE-23470 Move TestCliDriver tests to TestMiniLlapCliDriver if they are failing with TestMiniLlapLocalCliDriver (Miklos Gergely, reviewed by Jesus Camacho Rodriguez)

This is an automated email from the ASF dual-hosted git repository.

mgergely pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new c46bed6  HIVE-23470 Move TestCliDriver tests to TestMiniLlapCliDriver if they are failing with TestMiniLlapLocalCliDriver (Miklos Gergely, reviewed by Jesus Camacho Rodriguez)
c46bed6 is described below

commit c46bed637728f10482987f7a52a17f6399a9f6e6
Author: miklosgergely <mg...@cloudera.com>
AuthorDate: Mon May 18 12:59:56 2020 +0200

    HIVE-23470 Move TestCliDriver tests to TestMiniLlapCliDriver if they are failing with TestMiniLlapLocalCliDriver (Miklos Gergely, reviewed by Jesus Camacho Rodriguez)
---
 .../test/resources/testconfiguration.properties    |   60 +-
 ql/src/test/queries/clientpositive/input5.q        |    2 +
 .../test/queries/clientpositive/regexp_extract.q   |    2 +
 .../queries/clientpositive/select_transform_hint.q |    2 +
 .../clientpositive/create_genericudaf.q.out        |  107 -
 ql/src/test/results/clientpositive/input14.q.out   |  250 ---
 .../results/clientpositive/input14_limit.q.out     |  163 --
 ql/src/test/results/clientpositive/input17.q.out   |  174 --
 ql/src/test/results/clientpositive/input18.q.out   |  250 ---
 ql/src/test/results/clientpositive/input5.q.out    |  174 --
 .../test/results/clientpositive/insert_into3.q.out |  568 ------
 .../test/results/clientpositive/insert_into4.q.out |  460 -----
 .../test/results/clientpositive/insert_into5.q.out |  637 ------
 .../test/results/clientpositive/insert_into6.q.out |  383 ----
 .../{ => llap}/autoColumnStats_6.q.out             |  162 +-
 .../{ => llap}/autogen_colalias.q.out              |    0
 .../{ => llap}/binary_output_format.q.out          |  434 ++--
 .../clientpositive/llap/create_genericudaf.q.out   |  114 ++
 .../clientpositive/{ => llap}/create_udaf.q.out    |   24 +-
 .../clientpositive/{ => llap}/create_view.q.out    |  150 +-
 .../{ => llap}/gen_udf_example_add10.q.out         |   82 +-
 .../{ => llap}/groupby_bigdata.q.out               |    4 +-
 .../test/results/clientpositive/llap/input14.q.out |  252 +++
 .../clientpositive/llap/input14_limit.q.out        |  177 ++
 .../test/results/clientpositive/llap/input17.q.out |  176 ++
 .../test/results/clientpositive/llap/input18.q.out |  252 +++
 .../clientpositive/{ => llap}/input20.q.out        |  181 +-
 .../clientpositive/{ => llap}/input33.q.out        |  181 +-
 .../clientpositive/{ => llap}/input34.q.out        |  174 +-
 .../clientpositive/{ => llap}/input35.q.out        |  174 +-
 .../clientpositive/{ => llap}/input36.q.out        |  174 +-
 .../clientpositive/{ => llap}/input38.q.out        |  160 +-
 .../test/results/clientpositive/llap/input5.q.out  |  176 ++
 .../results/clientpositive/llap/insert_into3.q.out |  546 ++++++
 .../results/clientpositive/llap/insert_into4.q.out |  436 +++++
 .../results/clientpositive/llap/insert_into5.q.out |  550 ++++++
 .../results/clientpositive/llap/insert_into6.q.out |  356 ++++
 .../{ => llap}/load_binary_data.q.out              |  Bin 2392 -> 2405 bytes
 .../clientpositive/{ => llap}/macro_1.q.out        |   12 +-
 .../{ => llap}/macro_duplicate.q.out               |    4 +-
 .../clientpositive/{ => llap}/mapreduce3.q.out     |  164 +-
 .../clientpositive/{ => llap}/mapreduce4.q.out     |  166 +-
 .../clientpositive/{ => llap}/mapreduce7.q.out     |  164 +-
 .../clientpositive/{ => llap}/mapreduce8.q.out     |  166 +-
 .../{ => llap}/merge_test_dummy_operator.q.out     |    4 +-
 .../clientpositive/{ => llap}/newline.q.out        |   44 +-
 .../llap/nonreserved_keywords_insert_into1.q.out   |  449 +++++
 .../clientpositive/{ => llap}/nullscript.q.out     |   55 +-
 .../clientpositive/{ => llap}/orc_createas1.q.out  |  386 ++--
 .../clientpositive/{ => llap}/partcols1.q.out      |    4 +-
 .../clientpositive/llap/ppd_transform.q.out        |  459 +++++
 .../{ => llap}/query_with_semi.q.out               |   12 +-
 .../clientpositive/{ => llap}/rcfile_bigdata.q.out |    4 +-
 .../clientpositive/llap/regexp_extract.q.out       |  512 +++++
 .../{ => llap}/script_env_var1.q.out               |    4 +-
 .../{ => llap}/script_env_var2.q.out               |    4 +-
 .../results/clientpositive/llap/script_pipe.q.out  |  134 ++
 .../clientpositive/{ => llap}/scriptfile1.q.out    |    4 +-
 .../{ => llap}/select_transform_hint.q.out         | 2062 ++++++++++----------
 .../clientpositive/{ => llap}/str_to_map.q.out     |  106 +-
 .../{ => llap}/temp_table_partcols1.q.out          |    4 +-
 .../results/clientpositive/llap/transform1.q.out   |  150 ++
 .../clientpositive/{ => llap}/transform2.q.out     |    4 +-
 .../clientpositive/{ => llap}/transform3.q.out     |    4 +-
 .../clientpositive/llap/transform_ppr1.q.out       |  487 +++++
 .../clientpositive/llap/transform_ppr2.q.out       |  402 ++++
 .../clientpositive/{ => llap}/udaf_sum_list.q.out  |    4 +-
 .../clientpositive/{ => llap}/udf_printf.q.out     |   26 +-
 .../clientpositive/{ => llap}/union23.q.out        |  114 +-
 .../clientpositive/{ => llap}/union_script.q.out   |    8 +-
 .../{ => llap}/vector_custom_udf_configure.q.out   |   45 +-
 .../results/clientpositive/llap/vector_udf3.q.out  |  106 +
 .../nonreserved_keywords_insert_into1.q.out        |  440 -----
 .../results/clientpositive/ppd_transform.q.out     |  436 -----
 .../results/clientpositive/regexp_extract.q.out    |  492 -----
 .../test/results/clientpositive/script_pipe.q.out  |  123 --
 .../test/results/clientpositive/transform1.q.out   |  144 --
 .../results/clientpositive/transform_ppr1.q.out    |  468 -----
 .../results/clientpositive/transform_ppr2.q.out    |  389 ----
 .../test/results/clientpositive/vector_udf3.q.out  |  103 -
 80 files changed, 8411 insertions(+), 8624 deletions(-)
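
In short: HIVE-23470 moves the listed tests from TestCliDriver to TestMiniLlapCliDriver by adding their .q files to minillap.query.files in testconfiguration.properties, relocating the golden files into ql/src/test/results/clientpositive/llap/, and adding -- SORT_QUERY_RESULTS to three queries whose row order is not deterministic under LLAP. A moved test can then be exercised against the new driver from itests/qtest with the usual qtest invocation; the flags below follow the standard Hive developer workflow and may vary by branch:

    # run one of the moved tests on the MiniLlap driver
    cd itests/qtest
    mvn test -Dtest=TestMiniLlapCliDriver -Dqfile=input5.q

    # regenerate its golden file under ql/src/test/results/clientpositive/llap/
    mvn test -Dtest=TestMiniLlapCliDriver -Dqfile=input5.q -Dtest.output.overwrite=true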

diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index 2b98dc3..d6339f3 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -429,7 +429,65 @@ minillap.query.files=acid_bucket_pruning.q,\
   cttl.q,\
   vector_offset_limit.q,\
   temp_table_add_part_with_loc.q,\
-  temp_table_drop_partitions_filter4.q
+  temp_table_drop_partitions_filter4.q,\
+  autoColumnStats_6.q,\
+  autogen_colalias.q,\
+  binary_output_format.q,\
+  create_genericudaf.q,\
+  create_udaf.q,\
+  create_view.q,\
+  gen_udf_example_add10.q,\
+  groupby_bigdata.q,\
+  input14.q,\
+  input14_limit.q,\
+  input17.q,\
+  input18.q,\
+  input20.q,\
+  input33.q,\
+  input34.q,\
+  input35.q,\
+  input36.q,\
+  input38.q,\
+  input5.q,\
+  insert_into3.q,\
+  insert_into4.q,\
+  insert_into5.q,\
+  insert_into6.q,\
+  load_binary_data.q,\
+  macro_1.q,\
+  macro_duplicate.q,\
+  mapreduce3.q,\
+  mapreduce4.q,\
+  mapreduce7.q,\
+  mapreduce8.q,\
+  merge_test_dummy_operator.q,\
+  newline.q,\
+  nonreserved_keywords_insert_into1.q,\
+  nullscript.q,\
+  orc_createas1.q,\
+  partcols1.q,\
+  ppd_transform.q,\
+  query_with_semi.q,\
+  rcfile_bigdata.q,\
+  regexp_extract.q,\
+  script_env_var1.q,\
+  script_env_var2.q,\
+  script_pipe.q,\
+  scriptfile1.q,\
+  select_transform_hint.q,\
+  str_to_map.q,\
+  temp_table_partcols1.q,\
+  transform1.q,\
+  transform2.q,\
+  transform3.q,\
+  transform_ppr1.q,\
+  transform_ppr2.q,\
+  udaf_sum_list.q,\
+  udf_printf.q,\
+  union23.q,\
+  union_script.q,\
+  vector_custom_udf_configure.q,\
+  vector_udf3.q
 
 minillaplocal.query.files=\
   empty_files_external_table.q,\
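
Each *.query.files property in testconfiguration.properties binds a set of .q files to one CLI driver, and the driver determines which results directory the golden files are read from (clientpositive/llap/ for the MiniLlap drivers), which is why the .q.out files move in the diffs below. A new test would be registered the same way; a hypothetical minimal entry:

    # sketch only: my_new_test.q is a hypothetical file, mapped to TestMiniLlapCliDriver
    minillap.query.files=my_new_test.q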
diff --git a/ql/src/test/queries/clientpositive/input5.q b/ql/src/test/queries/clientpositive/input5.q
index bf598de..72a1e74 100644
--- a/ql/src/test/queries/clientpositive/input5.q
+++ b/ql/src/test/queries/clientpositive/input5.q
@@ -1,4 +1,6 @@
 --! qt:dataset:src_thrift
+-- SORT_QUERY_RESULTS
+
 CREATE TABLE dest1_n94(key STRING, value STRING) STORED AS TEXTFILE;
 
 EXPLAIN
diff --git a/ql/src/test/queries/clientpositive/regexp_extract.q b/ql/src/test/queries/clientpositive/regexp_extract.q
index 678b8fa..e32094b 100644
--- a/ql/src/test/queries/clientpositive/regexp_extract.q
+++ b/ql/src/test/queries/clientpositive/regexp_extract.q
@@ -1,4 +1,6 @@
 --! qt:dataset:src
+-- SORT_QUERY_RESULTS
+
 EXPLAIN EXTENDED
 FROM (
   FROM src
diff --git a/ql/src/test/queries/clientpositive/select_transform_hint.q b/ql/src/test/queries/clientpositive/select_transform_hint.q
index 39577d1..9ad7a92 100644
--- a/ql/src/test/queries/clientpositive/select_transform_hint.q
+++ b/ql/src/test/queries/clientpositive/select_transform_hint.q
@@ -1,4 +1,6 @@
 --! qt:dataset:src
+-- SORT_QUERY_RESULTS
+
 set hive.entity.capture.transform=true;
 
 EXPLAIN
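
The -- SORT_QUERY_RESULTS marker added to input5.q, regexp_extract.q, and select_transform_hint.q above is a QTest directive: the test harness sorts the query output lines before diffing them against the golden file, so the tests stay stable when LLAP returns rows in a different order than the old driver did. A minimal hypothetical .q file using the directive:

    --! qt:dataset:src
    -- SORT_QUERY_RESULTS

    SELECT key, value FROM src;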
diff --git a/ql/src/test/results/clientpositive/create_genericudaf.q.out b/ql/src/test/results/clientpositive/create_genericudaf.q.out
deleted file mode 100644
index 85d7850..0000000
--- a/ql/src/test/results/clientpositive/create_genericudaf.q.out
+++ /dev/null
@@ -1,107 +0,0 @@
-PREHOOK: query: EXPLAIN
-CREATE TEMPORARY FUNCTION test_avg AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage'
-PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: test_avg
-POSTHOOK: query: EXPLAIN
-CREATE TEMPORARY FUNCTION test_avg AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage'
-POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: test_avg
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Create Function
-      class: org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage
-      name: test_avg
-      temporary: true
-
-PREHOOK: query: CREATE TEMPORARY FUNCTION test_avg AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage'
-PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: test_avg
-POSTHOOK: query: CREATE TEMPORARY FUNCTION test_avg AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage'
-POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: test_avg
-PREHOOK: query: EXPLAIN
-SELECT
-    test_avg(1),
-    test_avg(substr(value,5))
-FROM src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-POSTHOOK: query: EXPLAIN
-SELECT
-    test_avg(1),
-    test_avg(substr(value,5))
-FROM src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-#### A masked pattern was here ####
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: substr(value, 5) (type: string)
-              outputColumnNames: _col1
-              Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
-              Group By Operator
-                aggregations: test_avg(1), test_avg(_col1)
-                minReductionHashAggr: 0.99
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 332 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  null sort order: 
-                  sort order: 
-                  Statistics: Num rows: 1 Data size: 332 Basic stats: COMPLETE Column stats: COMPLETE
-                  value expressions: _col0 (type: struct<count:bigint,sum:double,input:int>), _col1 (type: struct<count:bigint,sum:double,input:string>)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: test_avg(VALUE._col0), test_avg(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: SELECT
-    test_avg(1),
-    test_avg(substr(value,5))
-FROM src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT
-    test_avg(1),
-    test_avg(substr(value,5))
-FROM src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-#### A masked pattern was here ####
-1.0	260.182
-PREHOOK: query: DROP TEMPORARY FUNCTIOn test_avg
-PREHOOK: type: DROPFUNCTION
-PREHOOK: Output: test_avg
-POSTHOOK: query: DROP TEMPORARY FUNCTIOn test_avg
-POSTHOOK: type: DROPFUNCTION
-POSTHOOK: Output: test_avg
diff --git a/ql/src/test/results/clientpositive/input14.q.out b/ql/src/test/results/clientpositive/input14.q.out
deleted file mode 100644
index 0e61434..0000000
--- a/ql/src/test/results/clientpositive/input14.q.out
+++ /dev/null
@@ -1,250 +0,0 @@
-PREHOOK: query: CREATE TABLE dest1_n42(key INT, value STRING) STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@dest1_n42
-POSTHOOK: query: CREATE TABLE dest1_n42(key INT, value STRING) STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@dest1_n42
-PREHOOK: query: EXPLAIN
-FROM (
-  FROM src
-  SELECT TRANSFORM(src.key, src.value)
-         USING 'cat' AS (tkey, tvalue) 
-  CLUSTER BY tkey 
-) tmap
-INSERT OVERWRITE TABLE dest1_n42 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@dest1_n42
-POSTHOOK: query: EXPLAIN
-FROM (
-  FROM src
-  SELECT TRANSFORM(src.key, src.value)
-         USING 'cat' AS (tkey, tvalue) 
-  CLUSTER BY tkey 
-) tmap
-INSERT OVERWRITE TABLE dest1_n42 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@dest1_n42
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0, Stage-3
-  Stage-3 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-              Transform Operator
-                command: cat
-                output info:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                Filter Operator
-                  predicate: (_col0 < 100) (type: boolean)
-                  Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string)
-                    null sort order: a
-                    sort order: +
-                    Map-reduce partition columns: _col0 (type: string)
-                    Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
-                    value expressions: _col0 (type: string), _col1 (type: string)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: UDFToInteger(VALUE._col0) (type: int), VALUE._col1 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 166 Data size: 15770 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 166 Data size: 15770 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.dest1_n42
-          Select Operator
-            expressions: _col0 (type: int), _col1 (type: string)
-            outputColumnNames: key, value
-            Statistics: Num rows: 166 Data size: 15770 Basic stats: COMPLETE Column stats: COMPLETE
-            Group By Operator
-              aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-              minReductionHashAggr: 0.99
-              mode: hash
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.dest1_n42
-
-  Stage: Stage-2
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.dest1_n42
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              null sort order: 
-              sort order: 
-              Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-              value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-PREHOOK: query: FROM (
-  FROM src
-  SELECT TRANSFORM(src.key, src.value)
-         USING 'cat' AS (tkey, tvalue) 
-  CLUSTER BY tkey 
-) tmap
-INSERT OVERWRITE TABLE dest1_n42 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@dest1_n42
-POSTHOOK: query: FROM (
-  FROM src
-  SELECT TRANSFORM(src.key, src.value)
-         USING 'cat' AS (tkey, tvalue) 
-  CLUSTER BY tkey 
-) tmap
-INSERT OVERWRITE TABLE dest1_n42 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@dest1_n42
-POSTHOOK: Lineage: dest1_n42.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: dest1_n42.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: SELECT dest1_n42.* FROM dest1_n42
-PREHOOK: type: QUERY
-PREHOOK: Input: default@dest1_n42
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT dest1_n42.* FROM dest1_n42
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@dest1_n42
-#### A masked pattern was here ####
-0	val_0
-0	val_0
-0	val_0
-10	val_10
-11	val_11
-12	val_12
-12	val_12
-15	val_15
-15	val_15
-17	val_17
-18	val_18
-18	val_18
-19	val_19
-2	val_2
-20	val_20
-24	val_24
-24	val_24
-26	val_26
-26	val_26
-27	val_27
-28	val_28
-30	val_30
-33	val_33
-34	val_34
-35	val_35
-35	val_35
-35	val_35
-37	val_37
-37	val_37
-4	val_4
-41	val_41
-42	val_42
-42	val_42
-43	val_43
-44	val_44
-47	val_47
-5	val_5
-5	val_5
-5	val_5
-51	val_51
-51	val_51
-53	val_53
-54	val_54
-57	val_57
-58	val_58
-58	val_58
-64	val_64
-65	val_65
-66	val_66
-67	val_67
-67	val_67
-69	val_69
-70	val_70
-70	val_70
-70	val_70
-72	val_72
-72	val_72
-74	val_74
-76	val_76
-76	val_76
-77	val_77
-78	val_78
-8	val_8
-80	val_80
-82	val_82
-83	val_83
-83	val_83
-84	val_84
-84	val_84
-85	val_85
-86	val_86
-87	val_87
-9	val_9
-90	val_90
-90	val_90
-90	val_90
-92	val_92
-95	val_95
-95	val_95
-96	val_96
-97	val_97
-97	val_97
-98	val_98
-98	val_98
diff --git a/ql/src/test/results/clientpositive/input14_limit.q.out b/ql/src/test/results/clientpositive/input14_limit.q.out
deleted file mode 100644
index fe9d907..0000000
--- a/ql/src/test/results/clientpositive/input14_limit.q.out
+++ /dev/null
@@ -1,163 +0,0 @@
-PREHOOK: query: CREATE TABLE dest1_n13(key INT, value STRING) STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@dest1_n13
-POSTHOOK: query: CREATE TABLE dest1_n13(key INT, value STRING) STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@dest1_n13
-PREHOOK: query: EXPLAIN
-FROM (
-  FROM src
-  SELECT TRANSFORM(src.key, src.value)
-         USING 'cat' AS (tkey, tvalue) 
-  CLUSTER BY tkey LIMIT 20
-) tmap
-INSERT OVERWRITE TABLE dest1_n13 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@dest1_n13
-POSTHOOK: query: EXPLAIN
-FROM (
-  FROM src
-  SELECT TRANSFORM(src.key, src.value)
-         USING 'cat' AS (tkey, tvalue) 
-  CLUSTER BY tkey LIMIT 20
-) tmap
-INSERT OVERWRITE TABLE dest1_n13 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@dest1_n13
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-              Transform Operator
-                command: cat
-                output info:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  null sort order: a
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                  TopN Hash Memory Usage: 0.1
-                  value expressions: _col0 (type: string), _col1 (type: string)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-          Limit
-            Number of rows: 20
-            Statistics: Num rows: 20 Data size: 3560 Basic stats: COMPLETE Column stats: COMPLETE
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: string)
-              null sort order: a
-              sort order: +
-              Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 20 Data size: 3560 Basic stats: COMPLETE Column stats: COMPLETE
-              TopN Hash Memory Usage: 0.1
-              value expressions: _col1 (type: string)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Select Operator
-          expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 20 Data size: 3560 Basic stats: COMPLETE Column stats: COMPLETE
-          Limit
-            Number of rows: 20
-            Statistics: Num rows: 20 Data size: 3560 Basic stats: COMPLETE Column stats: COMPLETE
-            Filter Operator
-              predicate: (_col0 < 100) (type: boolean)
-              Statistics: Num rows: 6 Data size: 1068 Basic stats: COMPLETE Column stats: COMPLETE
-              Select Operator
-                expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 6 Data size: 570 Basic stats: COMPLETE Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 6 Data size: 570 Basic stats: COMPLETE Column stats: COMPLETE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.dest1_n13
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.dest1_n13
-
-  Stage: Stage-3
-    Stats Work
-      Basic Stats Work:
-
-PREHOOK: query: FROM (
-  FROM src
-  SELECT TRANSFORM(src.key, src.value)
-         USING 'cat' AS (tkey, tvalue) 
-  CLUSTER BY tkey LIMIT 20
-) tmap
-INSERT OVERWRITE TABLE dest1_n13 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@dest1_n13
-POSTHOOK: query: FROM (
-  FROM src
-  SELECT TRANSFORM(src.key, src.value)
-         USING 'cat' AS (tkey, tvalue) 
-  CLUSTER BY tkey LIMIT 20
-) tmap
-INSERT OVERWRITE TABLE dest1_n13 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@dest1_n13
-POSTHOOK: Lineage: dest1_n13.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: dest1_n13.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: SELECT dest1_n13.* FROM dest1_n13
-PREHOOK: type: QUERY
-PREHOOK: Input: default@dest1_n13
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT dest1_n13.* FROM dest1_n13
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@dest1_n13
-#### A masked pattern was here ####
-0	val_0
-0	val_0
-0	val_0
-10	val_10
-11	val_11
diff --git a/ql/src/test/results/clientpositive/input17.q.out b/ql/src/test/results/clientpositive/input17.q.out
deleted file mode 100644
index 9c03f5b..0000000
--- a/ql/src/test/results/clientpositive/input17.q.out
+++ /dev/null
@@ -1,174 +0,0 @@
-PREHOOK: query: CREATE TABLE dest1_n81(key INT, value STRING) STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@dest1_n81
-POSTHOOK: query: CREATE TABLE dest1_n81(key INT, value STRING) STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@dest1_n81
-PREHOOK: query: EXPLAIN
-FROM (
-  FROM src_thrift
-  SELECT TRANSFORM(src_thrift.aint + src_thrift.lint[0], src_thrift.lintstring[0])
-         USING 'cat' AS (tkey, tvalue) 
-  CLUSTER BY tkey 
-) tmap
-INSERT OVERWRITE TABLE dest1_n81 SELECT tmap.tkey, tmap.tvalue
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_thrift
-PREHOOK: Output: default@dest1_n81
-POSTHOOK: query: EXPLAIN
-FROM (
-  FROM src_thrift
-  SELECT TRANSFORM(src_thrift.aint + src_thrift.lint[0], src_thrift.lintstring[0])
-         USING 'cat' AS (tkey, tvalue) 
-  CLUSTER BY tkey 
-) tmap
-INSERT OVERWRITE TABLE dest1_n81 SELECT tmap.tkey, tmap.tvalue
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src_thrift
-POSTHOOK: Output: default@dest1_n81
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0, Stage-3
-  Stage-3 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src_thrift
-            Statistics: Num rows: 11 Data size: 29524 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: (aint + lint[0]) (type: int), lintstring[0] (type: struct<myint:int,mystring:string,underscore_int:int>)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 11 Data size: 29524 Basic stats: COMPLETE Column stats: NONE
-              Transform Operator
-                command: cat
-                output info:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                Statistics: Num rows: 11 Data size: 29524 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  null sort order: a
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 11 Data size: 29524 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col0 (type: string), _col1 (type: string)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: UDFToInteger(VALUE._col0) (type: int), VALUE._col1 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 11 Data size: 29524 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 11 Data size: 29524 Basic stats: COMPLETE Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.dest1_n81
-          Select Operator
-            expressions: _col0 (type: int), _col1 (type: string)
-            outputColumnNames: key, value
-            Statistics: Num rows: 11 Data size: 29524 Basic stats: COMPLETE Column stats: NONE
-            Group By Operator
-              aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-              minReductionHashAggr: 0.99
-              mode: hash
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 1 Data size: 3548 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.dest1_n81
-
-  Stage: Stage-2
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.dest1_n81
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              null sort order: 
-              sort order: 
-              Statistics: Num rows: 1 Data size: 3548 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 3564 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 3564 Basic stats: COMPLETE Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-PREHOOK: query: FROM (
-  FROM src_thrift
-  SELECT TRANSFORM(src_thrift.aint + src_thrift.lint[0], src_thrift.lintstring[0])
-         USING 'cat' AS (tkey, tvalue) 
-  CLUSTER BY tkey 
-) tmap
-INSERT OVERWRITE TABLE dest1_n81 SELECT tmap.tkey, tmap.tvalue
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_thrift
-PREHOOK: Output: default@dest1_n81
-POSTHOOK: query: FROM (
-  FROM src_thrift
-  SELECT TRANSFORM(src_thrift.aint + src_thrift.lint[0], src_thrift.lintstring[0])
-         USING 'cat' AS (tkey, tvalue) 
-  CLUSTER BY tkey 
-) tmap
-INSERT OVERWRITE TABLE dest1_n81 SELECT tmap.tkey, tmap.tvalue
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src_thrift
-POSTHOOK: Output: default@dest1_n81
-POSTHOOK: Lineage: dest1_n81.key SCRIPT [(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lint, type:array<int>, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lintstring, type:array<struct<myint:int,mystring:string,underscore_int:int>>, comment:from deserializer), ]
-POSTHOOK: Lineage: dest1_n81.value SCRIPT [(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lint, type:array<int>, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lintstring, type:array<struct<myint:int,mystring:string,underscore_int:int>>, comment:from deserializer), ]
-PREHOOK: query: SELECT dest1_n81.* FROM dest1_n81
-PREHOOK: type: QUERY
-PREHOOK: Input: default@dest1_n81
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT dest1_n81.* FROM dest1_n81
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@dest1_n81
-#### A masked pattern was here ####
--1461153966	{"myint":49,"mystring":"343","underscore_int":7}
--1952710705	{"myint":25,"mystring":"125","underscore_int":5}
--734328905	{"myint":16,"mystring":"64","underscore_int":4}
--751827636	{"myint":4,"mystring":"8","underscore_int":2}
-1244525196	{"myint":36,"mystring":"216","underscore_int":6}
-1638581586	{"myint":64,"mystring":"512","underscore_int":8}
-1712634731	{"myint":0,"mystring":"0","underscore_int":0}
-336964422	{"myint":81,"mystring":"729","underscore_int":9}
-465985201	{"myint":1,"mystring":"1","underscore_int":1}
-477111225	{"myint":9,"mystring":"27","underscore_int":3}
-NULL	NULL
diff --git a/ql/src/test/results/clientpositive/input18.q.out b/ql/src/test/results/clientpositive/input18.q.out
deleted file mode 100644
index ce731e6..0000000
--- a/ql/src/test/results/clientpositive/input18.q.out
+++ /dev/null
@@ -1,250 +0,0 @@
-PREHOOK: query: CREATE TABLE dest1_n124(key INT, value STRING) STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@dest1_n124
-POSTHOOK: query: CREATE TABLE dest1_n124(key INT, value STRING) STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@dest1_n124
-PREHOOK: query: EXPLAIN
-FROM (
-  FROM src
-  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
-         USING 'cat'
-  CLUSTER BY key
-) tmap
-INSERT OVERWRITE TABLE dest1_n124 SELECT tmap.key, regexp_replace(tmap.value,'\t','+') WHERE tmap.key < 100
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@dest1_n124
-POSTHOOK: query: EXPLAIN
-FROM (
-  FROM src
-  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
-         USING 'cat'
-  CLUSTER BY key
-) tmap
-INSERT OVERWRITE TABLE dest1_n124 SELECT tmap.key, regexp_replace(tmap.value,'\t','+') WHERE tmap.key < 100
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@dest1_n124
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0, Stage-3
-  Stage-3 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: string), value (type: string), 3 (type: int), 7 (type: int)
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
-              Transform Operator
-                command: cat
-                output info:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
-                Filter Operator
-                  predicate: (_col0 < 100) (type: boolean)
-                  Statistics: Num rows: 166 Data size: 30876 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string)
-                    null sort order: a
-                    sort order: +
-                    Map-reduce partition columns: _col0 (type: string)
-                    Statistics: Num rows: 166 Data size: 30876 Basic stats: COMPLETE Column stats: COMPLETE
-                    value expressions: _col0 (type: string), _col1 (type: string)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: UDFToInteger(VALUE._col0) (type: int), regexp_replace(VALUE._col1, '	', '+') (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 166 Data size: 31208 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 166 Data size: 31208 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.dest1_n124
-          Select Operator
-            expressions: _col0 (type: int), _col1 (type: string)
-            outputColumnNames: key, value
-            Statistics: Num rows: 166 Data size: 31208 Basic stats: COMPLETE Column stats: COMPLETE
-            Group By Operator
-              aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-              minReductionHashAggr: 0.99
-              mode: hash
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.dest1_n124
-
-  Stage: Stage-2
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.dest1_n124
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              null sort order: 
-              sort order: 
-              Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-              value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-PREHOOK: query: FROM (
-  FROM src
-  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
-         USING 'cat'
-  CLUSTER BY key
-) tmap
-INSERT OVERWRITE TABLE dest1_n124 SELECT tmap.key, regexp_replace(tmap.value,'\t','+') WHERE tmap.key < 100
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@dest1_n124
-POSTHOOK: query: FROM (
-  FROM src
-  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
-         USING 'cat'
-  CLUSTER BY key
-) tmap
-INSERT OVERWRITE TABLE dest1_n124 SELECT tmap.key, regexp_replace(tmap.value,'\t','+') WHERE tmap.key < 100
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@dest1_n124
-POSTHOOK: Lineage: dest1_n124.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: dest1_n124.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: SELECT dest1_n124.* FROM dest1_n124
-PREHOOK: type: QUERY
-PREHOOK: Input: default@dest1_n124
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT dest1_n124.* FROM dest1_n124
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@dest1_n124
-#### A masked pattern was here ####
-0	val_0+3+7
-0	val_0+3+7
-0	val_0+3+7
-10	val_10+3+7
-11	val_11+3+7
-12	val_12+3+7
-12	val_12+3+7
-15	val_15+3+7
-15	val_15+3+7
-17	val_17+3+7
-18	val_18+3+7
-18	val_18+3+7
-19	val_19+3+7
-2	val_2+3+7
-20	val_20+3+7
-24	val_24+3+7
-24	val_24+3+7
-26	val_26+3+7
-26	val_26+3+7
-27	val_27+3+7
-28	val_28+3+7
-30	val_30+3+7
-33	val_33+3+7
-34	val_34+3+7
-35	val_35+3+7
-35	val_35+3+7
-35	val_35+3+7
-37	val_37+3+7
-37	val_37+3+7
-4	val_4+3+7
-41	val_41+3+7
-42	val_42+3+7
-42	val_42+3+7
-43	val_43+3+7
-44	val_44+3+7
-47	val_47+3+7
-5	val_5+3+7
-5	val_5+3+7
-5	val_5+3+7
-51	val_51+3+7
-51	val_51+3+7
-53	val_53+3+7
-54	val_54+3+7
-57	val_57+3+7
-58	val_58+3+7
-58	val_58+3+7
-64	val_64+3+7
-65	val_65+3+7
-66	val_66+3+7
-67	val_67+3+7
-67	val_67+3+7
-69	val_69+3+7
-70	val_70+3+7
-70	val_70+3+7
-70	val_70+3+7
-72	val_72+3+7
-72	val_72+3+7
-74	val_74+3+7
-76	val_76+3+7
-76	val_76+3+7
-77	val_77+3+7
-78	val_78+3+7
-8	val_8+3+7
-80	val_80+3+7
-82	val_82+3+7
-83	val_83+3+7
-83	val_83+3+7
-84	val_84+3+7
-84	val_84+3+7
-85	val_85+3+7
-86	val_86+3+7
-87	val_87+3+7
-9	val_9+3+7
-90	val_90+3+7
-90	val_90+3+7
-90	val_90+3+7
-92	val_92+3+7
-95	val_95+3+7
-95	val_95+3+7
-96	val_96+3+7
-97	val_97+3+7
-97	val_97+3+7
-98	val_98+3+7
-98	val_98+3+7
diff --git a/ql/src/test/results/clientpositive/input5.q.out b/ql/src/test/results/clientpositive/input5.q.out
deleted file mode 100644
index becfc18..0000000
--- a/ql/src/test/results/clientpositive/input5.q.out
+++ /dev/null
@@ -1,174 +0,0 @@
-PREHOOK: query: CREATE TABLE dest1_n94(key STRING, value STRING) STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@dest1_n94
-POSTHOOK: query: CREATE TABLE dest1_n94(key STRING, value STRING) STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@dest1_n94
-PREHOOK: query: EXPLAIN
-FROM (
-  FROM src_thrift
-  SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring)
-         USING 'cat' AS (tkey, tvalue) 
-  CLUSTER BY tkey 
-) tmap
-INSERT OVERWRITE TABLE dest1_n94 SELECT tmap.tkey, tmap.tvalue
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_thrift
-PREHOOK: Output: default@dest1_n94
-POSTHOOK: query: EXPLAIN
-FROM (
-  FROM src_thrift
-  SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring)
-         USING 'cat' AS (tkey, tvalue) 
-  CLUSTER BY tkey 
-) tmap
-INSERT OVERWRITE TABLE dest1_n94 SELECT tmap.tkey, tmap.tvalue
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src_thrift
-POSTHOOK: Output: default@dest1_n94
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0, Stage-3
-  Stage-3 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src_thrift
-            Statistics: Num rows: 11 Data size: 29480 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: lint (type: array<int>), lintstring (type: array<struct<myint:int,mystring:string,underscore_int:int>>)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 11 Data size: 29480 Basic stats: COMPLETE Column stats: NONE
-              Transform Operator
-                command: cat
-                output info:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                Statistics: Num rows: 11 Data size: 29480 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  null sort order: a
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 11 Data size: 29480 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col0 (type: string), _col1 (type: string)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 11 Data size: 29480 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 11 Data size: 29480 Basic stats: COMPLETE Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.dest1_n94
-          Select Operator
-            expressions: _col0 (type: string), _col1 (type: string)
-            outputColumnNames: key, value
-            Statistics: Num rows: 11 Data size: 29480 Basic stats: COMPLETE Column stats: NONE
-            Group By Operator
-              aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-              minReductionHashAggr: 0.99
-              mode: hash
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 1 Data size: 3560 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.dest1_n94
-
-  Stage: Stage-2
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: string, string
-          Table: default.dest1_n94
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              null sort order: 
-              sort order: 
-              Statistics: Num rows: 1 Data size: 3560 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col0 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 3560 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 3560 Basic stats: COMPLETE Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-PREHOOK: query: FROM (
-  FROM src_thrift
-  SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring)
-         USING 'cat' AS (tkey, tvalue) 
-  CLUSTER BY tkey 
-) tmap
-INSERT OVERWRITE TABLE dest1_n94 SELECT tmap.tkey, tmap.tvalue
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_thrift
-PREHOOK: Output: default@dest1_n94
-POSTHOOK: query: FROM (
-  FROM src_thrift
-  SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring)
-         USING 'cat' AS (tkey, tvalue) 
-  CLUSTER BY tkey 
-) tmap
-INSERT OVERWRITE TABLE dest1_n94 SELECT tmap.tkey, tmap.tvalue
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src_thrift
-POSTHOOK: Output: default@dest1_n94
-POSTHOOK: Lineage: dest1_n94.key SCRIPT [(src_thrift)src_thrift.FieldSchema(name:lint, type:array<int>, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lintstring, type:array<struct<myint:int,mystring:string,underscore_int:int>>, comment:from deserializer), ]
-POSTHOOK: Lineage: dest1_n94.value SCRIPT [(src_thrift)src_thrift.FieldSchema(name:lint, type:array<int>, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lintstring, type:array<struct<myint:int,mystring:string,underscore_int:int>>, comment:from deserializer), ]
-PREHOOK: query: SELECT dest1_n94.* FROM dest1_n94
-PREHOOK: type: QUERY
-PREHOOK: Input: default@dest1_n94
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT dest1_n94.* FROM dest1_n94
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@dest1_n94
-#### A masked pattern was here ####
-NULL	NULL
-[0,0,0]	[{"myint":0,"mystring":"0","underscore_int":0}]
-[1,2,3]	[{"myint":1,"mystring":"1","underscore_int":1}]
-[2,4,6]	[{"myint":4,"mystring":"8","underscore_int":2}]
-[3,6,9]	[{"myint":9,"mystring":"27","underscore_int":3}]
-[4,8,12]	[{"myint":16,"mystring":"64","underscore_int":4}]
-[5,10,15]	[{"myint":25,"mystring":"125","underscore_int":5}]
-[6,12,18]	[{"myint":36,"mystring":"216","underscore_int":6}]
-[7,14,21]	[{"myint":49,"mystring":"343","underscore_int":7}]
-[8,16,24]	[{"myint":64,"mystring":"512","underscore_int":8}]
-[9,18,27]	[{"myint":81,"mystring":"729","underscore_int":9}]
diff --git a/ql/src/test/results/clientpositive/insert_into3.q.out b/ql/src/test/results/clientpositive/insert_into3.q.out
deleted file mode 100644
index 60fd42d..0000000
--- a/ql/src/test/results/clientpositive/insert_into3.q.out
+++ /dev/null
@@ -1,568 +0,0 @@
-PREHOOK: query: DROP TABLE insert_into3a
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE insert_into3a
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE insert_into3b
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE insert_into3b
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE insert_into3a (key int, value string)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@insert_into3a
-POSTHOOK: query: CREATE TABLE insert_into3a (key int, value string)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@insert_into3a
-PREHOOK: query: CREATE TABLE insert_into3b (key int, value string)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@insert_into3b
-POSTHOOK: query: CREATE TABLE insert_into3b (key int, value string)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@insert_into3b
-PREHOOK: query: EXPLAIN FROM src INSERT INTO TABLE insert_into3a SELECT * ORDER BY key, value LIMIT 50
-                 INSERT INTO TABLE insert_into3b SELECT * ORDER BY key, value LIMIT 100
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@insert_into3a
-PREHOOK: Output: default@insert_into3b
-POSTHOOK: query: EXPLAIN FROM src INSERT INTO TABLE insert_into3a SELECT * ORDER BY key, value LIMIT 50
-                 INSERT INTO TABLE insert_into3b SELECT * ORDER BY key, value LIMIT 100
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@insert_into3a
-POSTHOOK: Output: default@insert_into3b
-STAGE DEPENDENCIES:
-  Stage-2 is a root stage
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0, Stage-4, Stage-7
-  Stage-4 depends on stages: Stage-2
-  Stage-6 depends on stages: Stage-1, Stage-4, Stage-7
-  Stage-5 depends on stages: Stage-2
-  Stage-1 depends on stages: Stage-5
-  Stage-7 depends on stages: Stage-5
-
-STAGE PLANS:
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-              Reduce Output Operator
-                key expressions: _col0 (type: string), _col1 (type: string)
-                null sort order: zz
-                sort order: ++
-                Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                TopN Hash Memory Usage: 0.1
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Select Operator
-          expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-          Limit
-            Number of rows: 50
-            Statistics: Num rows: 50 Data size: 8900 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 50 Data size: 4750 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 50 Data size: 4750 Basic stats: COMPLETE Column stats: COMPLETE
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.insert_into3a
-              Select Operator
-                expressions: _col0 (type: int), _col1 (type: string)
-                outputColumnNames: key, value
-                Statistics: Num rows: 50 Data size: 4750 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                  minReductionHashAggr: 0.99
-                  mode: hash
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                  File Output Operator
-                    compressed: false
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.insert_into3a
-
-  Stage: Stage-3
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into3a
-
-  Stage: Stage-4
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              null sort order: 
-              sort order: 
-              Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-              value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-6
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into3b
-
-  Stage: Stage-5
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: string)
-              null sort order: zz
-              sort order: ++
-              Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-              TopN Hash Memory Usage: 0.1
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Select Operator
-          expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-          Limit
-            Number of rows: 100
-            Statistics: Num rows: 100 Data size: 17800 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 100 Data size: 9500 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 100 Data size: 9500 Basic stats: COMPLETE Column stats: COMPLETE
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.insert_into3b
-              Select Operator
-                expressions: _col0 (type: int), _col1 (type: string)
-                outputColumnNames: key, value
-                Statistics: Num rows: 100 Data size: 9500 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                  minReductionHashAggr: 0.99
-                  mode: hash
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                  File Output Operator
-                    compressed: false
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-1
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.insert_into3b
-
-  Stage: Stage-7
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              null sort order: 
-              sort order: 
-              Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-              value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-PREHOOK: query: FROM src INSERT INTO TABLE insert_into3a SELECT * ORDER BY key, value LIMIT 50
-         INSERT INTO TABLE insert_into3b SELECT * ORDER BY key, value LIMIT 100
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@insert_into3a
-PREHOOK: Output: default@insert_into3b
-POSTHOOK: query: FROM src INSERT INTO TABLE insert_into3a SELECT * ORDER BY key, value LIMIT 50
-         INSERT INTO TABLE insert_into3b SELECT * ORDER BY key, value LIMIT 100
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@insert_into3a
-POSTHOOK: Output: default@insert_into3b
-POSTHOOK: Lineage: insert_into3a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: insert_into3a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: insert_into3b.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: insert_into3b.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3a
-) t
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into3a
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3a
-) t
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into3a
-#### A masked pattern was here ####
--1254133670
-PREHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3b
-) t
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into3b
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3b
-) t
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into3b
-#### A masked pattern was here ####
--1142373758
-PREHOOK: query: EXPLAIN FROM src INSERT OVERWRITE TABLE insert_into3a SELECT * LIMIT 10
-                 INSERT INTO TABLE insert_into3b SELECT * LIMIT 10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@insert_into3a
-PREHOOK: Output: default@insert_into3b
-POSTHOOK: query: EXPLAIN FROM src INSERT OVERWRITE TABLE insert_into3a SELECT * LIMIT 10
-                 INSERT INTO TABLE insert_into3b SELECT * LIMIT 10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@insert_into3a
-POSTHOOK: Output: default@insert_into3b
-STAGE DEPENDENCIES:
-  Stage-2 is a root stage
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0, Stage-4, Stage-7
-  Stage-4 depends on stages: Stage-2
-  Stage-6 depends on stages: Stage-1, Stage-4, Stage-7
-  Stage-5 depends on stages: Stage-2
-  Stage-1 depends on stages: Stage-5
-  Stage-7 depends on stages: Stage-5
-
-STAGE PLANS:
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-              Limit
-                Number of rows: 10
-                Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  null sort order: 
-                  sort order: 
-                  Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
-                  TopN Hash Memory Usage: 0.1
-                  value expressions: _col0 (type: string), _col1 (type: string)
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-              Limit
-                Number of rows: 10
-                Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
-          Limit
-            Number of rows: 10
-            Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.insert_into3a
-              Select Operator
-                expressions: _col0 (type: int), _col1 (type: string)
-                outputColumnNames: key, value
-                Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                  minReductionHashAggr: 0.99
-                  mode: hash
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                  File Output Operator
-                    compressed: false
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.insert_into3a
-
-  Stage: Stage-3
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into3a
-
-  Stage: Stage-4
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              null sort order: 
-              sort order: 
-              Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-              value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-6
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into3b
-
-  Stage: Stage-5
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              null sort order: 
-              sort order: 
-              Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
-              TopN Hash Memory Usage: 0.1
-              value expressions: _col0 (type: string), _col1 (type: string)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
-          Limit
-            Number of rows: 10
-            Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.insert_into3b
-              Select Operator
-                expressions: _col0 (type: int), _col1 (type: string)
-                outputColumnNames: key, value
-                Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                  minReductionHashAggr: 0.99
-                  mode: hash
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                  File Output Operator
-                    compressed: false
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-1
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.insert_into3b
-
-  Stage: Stage-7
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              null sort order: 
-              sort order: 
-              Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-              value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-PREHOOK: query: FROM src INSERT OVERWRITE TABLE insert_into3a SELECT * LIMIT 10
-         INSERT INTO TABLE insert_into3b SELECT * LIMIT 10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@insert_into3a
-PREHOOK: Output: default@insert_into3b
-POSTHOOK: query: FROM src INSERT OVERWRITE TABLE insert_into3a SELECT * LIMIT 10
-         INSERT INTO TABLE insert_into3b SELECT * LIMIT 10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@insert_into3a
-POSTHOOK: Output: default@insert_into3b
-POSTHOOK: Lineage: insert_into3a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: insert_into3a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: insert_into3b.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: insert_into3b.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3a
-) t
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into3a
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3a
-) t
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into3a
-#### A masked pattern was here ####
--826625916
-PREHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3b
-) t
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into3b
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3b
-) t
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into3b
-#### A masked pattern was here ####
--1968999674
-PREHOOK: query: DROP TABLE insert_into3a
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@insert_into3a
-PREHOOK: Output: default@insert_into3a
-POSTHOOK: query: DROP TABLE insert_into3a
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@insert_into3a
-POSTHOOK: Output: default@insert_into3a
-PREHOOK: query: DROP TABLE insert_into3b
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@insert_into3b
-PREHOOK: Output: default@insert_into3b
-POSTHOOK: query: DROP TABLE insert_into3b
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@insert_into3b
-POSTHOOK: Output: default@insert_into3b
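(The scenario covered by insert_into3.q, condensed from the hunk above: a single scan of src feeds two independent ORDER BY ... LIMIT sinks, and each sink is then verified with an order-independent checksum built by streaming the rows through tr.)

    -- Multi-insert: one source scan, two bounded, sorted sinks.
    FROM src
    INSERT INTO TABLE insert_into3a SELECT * ORDER BY key, value LIMIT 50
    INSERT INTO TABLE insert_into3b SELECT * ORDER BY key, value LIMIT 100;

    -- Order-independent verification: replace tabs with '_' via an
    -- external script, hash each output line, and sum the hashes.
    SELECT SUM(HASH(c)) FROM (
        SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3a
    ) t;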
diff --git a/ql/src/test/results/clientpositive/insert_into4.q.out b/ql/src/test/results/clientpositive/insert_into4.q.out
deleted file mode 100644
index 031d562..0000000
--- a/ql/src/test/results/clientpositive/insert_into4.q.out
+++ /dev/null
@@ -1,460 +0,0 @@
-PREHOOK: query: DROP TABLE insert_into4a
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE insert_into4a
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE insert_into4b
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE insert_into4b
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE insert_into4a (key int, value string)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@insert_into4a
-POSTHOOK: query: CREATE TABLE insert_into4a (key int, value string)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@insert_into4a
-PREHOOK: query: CREATE TABLE insert_into4b (key int, value string)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@insert_into4b
-POSTHOOK: query: CREATE TABLE insert_into4b (key int, value string)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@insert_into4b
-PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@insert_into4a
-POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@insert_into4a
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0, Stage-3
-  Stage-3 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-              Limit
-                Number of rows: 10
-                Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  null sort order: 
-                  sort order: 
-                  Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
-                  TopN Hash Memory Usage: 0.1
-                  value expressions: _col0 (type: string), _col1 (type: string)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
-          Limit
-            Number of rows: 10
-            Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.insert_into4a
-              Select Operator
-                expressions: _col0 (type: int), _col1 (type: string)
-                outputColumnNames: key, value
-                Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                  minReductionHashAggr: 0.99
-                  mode: hash
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                  File Output Operator
-                    compressed: false
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.insert_into4a
-
-  Stage: Stage-2
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into4a
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              null sort order: 
-              sort order: 
-              Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-              value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-PREHOOK: query: INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@insert_into4a
-POSTHOOK: query: INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@insert_into4a
-POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4a
-) t
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into4a
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4a
-) t
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into4a
-#### A masked pattern was here ####
--826625916
-PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@insert_into4a
-POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@insert_into4a
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0, Stage-3
-  Stage-3 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-              Limit
-                Number of rows: 10
-                Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  null sort order: 
-                  sort order: 
-                  Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
-                  TopN Hash Memory Usage: 0.1
-                  value expressions: _col0 (type: string), _col1 (type: string)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
-          Limit
-            Number of rows: 10
-            Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.insert_into4a
-              Select Operator
-                expressions: _col0 (type: int), _col1 (type: string)
-                outputColumnNames: key, value
-                Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                  minReductionHashAggr: 0.99
-                  mode: hash
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                  File Output Operator
-                    compressed: false
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.insert_into4a
-
-  Stage: Stage-2
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into4a
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              null sort order: 
-              sort order: 
-              Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-              value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-PREHOOK: query: INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@insert_into4a
-POSTHOOK: query: INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@insert_into4a
-POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4a
-) t
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into4a
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4a
-) t
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into4a
-#### A masked pattern was here ####
--1653251832
-PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into4b SELECT * FROM insert_into4a
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into4a
-PREHOOK: Output: default@insert_into4b
-POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into4b SELECT * FROM insert_into4a
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into4a
-POSTHOOK: Output: default@insert_into4b
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
-  Stage-4
-  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
-  Stage-2 depends on stages: Stage-0
-  Stage-3
-  Stage-5
-  Stage-6 depends on stages: Stage-5
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: insert_into4a
-            Statistics: Num rows: 20 Data size: 1900 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: int), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 20 Data size: 1900 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 20 Data size: 1900 Basic stats: COMPLETE Column stats: COMPLETE
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.insert_into4b
-              Select Operator
-                expressions: _col0 (type: int), _col1 (type: string)
-                outputColumnNames: key, value
-                Statistics: Num rows: 20 Data size: 1900 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                  minReductionHashAggr: 0.99
-                  mode: hash
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    null sort order: 
-                    sort order: 
-                    Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                    value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-7
-    Conditional Operator
-
-  Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.insert_into4b
-
-  Stage: Stage-2
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into4b
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.insert_into4b
-
-  Stage: Stage-5
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.insert_into4b
-
-  Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-PREHOOK: query: INSERT INTO TABLE insert_into4b SELECT * FROM insert_into4a
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into4a
-PREHOOK: Output: default@insert_into4b
-POSTHOOK: query: INSERT INTO TABLE insert_into4b SELECT * FROM insert_into4a
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into4a
-POSTHOOK: Output: default@insert_into4b
-POSTHOOK: Lineage: insert_into4b.key SIMPLE [(insert_into4a)insert_into4a.FieldSchema(name:key, type:int, comment:null), ]
-POSTHOOK: Lineage: insert_into4b.value SIMPLE [(insert_into4a)insert_into4a.FieldSchema(name:value, type:string, comment:null), ]
-PREHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4b
-) t
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into4b
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4b
-) t
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into4b
-#### A masked pattern was here ####
--1653251832
-PREHOOK: query: DROP TABLE insert_into4a
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@insert_into4a
-PREHOOK: Output: default@insert_into4a
-POSTHOOK: query: DROP TABLE insert_into4a
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@insert_into4a
-POSTHOOK: Output: default@insert_into4a
-PREHOOK: query: DROP TABLE insert_into4b
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@insert_into4b
-PREHOOK: Output: default@insert_into4b
-POSTHOOK: query: DROP TABLE insert_into4b
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@insert_into4b
-POSTHOOK: Output: default@insert_into4b
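(What insert_into4.q pins down, condensed from the hunk above, is append semantics: running the identical INSERT INTO twice doubles the table's contents, visible in the checksum moving from -826625916 to -1653251832, exactly twice the first value, and a follow-up table-to-table insert carries the same checksum over to the target.)

    -- INSERT INTO appends rather than overwrites; issued twice, the
    -- table holds both copies and the line-hash checksum doubles.
    INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10;
    INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10;

    -- Copying the appended table preserves the checksum (-1653251832).
    INSERT INTO TABLE insert_into4b SELECT * FROM insert_into4a;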
diff --git a/ql/src/test/results/clientpositive/insert_into5.q.out b/ql/src/test/results/clientpositive/insert_into5.q.out
deleted file mode 100644
index 8ca94ee..0000000
--- a/ql/src/test/results/clientpositive/insert_into5.q.out
+++ /dev/null
@@ -1,637 +0,0 @@
-PREHOOK: query: DROP TABLE insert_into5a
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE insert_into5a
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE insert_into5b
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE insert_into5b
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE insert_into5a (key int, value string)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@insert_into5a
-POSTHOOK: query: CREATE TABLE insert_into5a (key int, value string)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@insert_into5a
-PREHOOK: query: CREATE TABLE insert_into5b (key int, value string) PARTITIONED BY (ds string)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@insert_into5b
-POSTHOOK: query: CREATE TABLE insert_into5b (key int, value string) PARTITIONED BY (ds string)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@insert_into5b
-PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5a SELECT 1, 'one' FROM src LIMIT 10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@insert_into5a
-POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5a SELECT 1, 'one' FROM src LIMIT 10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@insert_into5a
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0, Stage-3
-  Stage-3 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: 1 (type: int), 'one' (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
-              Limit
-                Number of rows: 10
-                Statistics: Num rows: 10 Data size: 910 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  null sort order: 
-                  sort order: 
-                  Statistics: Num rows: 10 Data size: 910 Basic stats: COMPLETE Column stats: COMPLETE
-                  TopN Hash Memory Usage: 0.1
-                  value expressions: _col0 (type: int), _col1 (type: string)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: int), VALUE._col1 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 10 Data size: 910 Basic stats: COMPLETE Column stats: COMPLETE
-          Limit
-            Number of rows: 10
-            Statistics: Num rows: 10 Data size: 910 Basic stats: COMPLETE Column stats: COMPLETE
-            File Output Operator
-              compressed: false
-              Statistics: Num rows: 10 Data size: 910 Basic stats: COMPLETE Column stats: COMPLETE
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.insert_into5a
-            Select Operator
-              expressions: _col0 (type: int), _col1 (type: string)
-              outputColumnNames: key, value
-              Statistics: Num rows: 10 Data size: 910 Basic stats: COMPLETE Column stats: COMPLETE
-              Group By Operator
-                aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                minReductionHashAggr: 0.99
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.insert_into5a
-
-  Stage: Stage-2
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into5a
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              null sort order: 
-              sort order: 
-              Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-              value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-PREHOOK: query: INSERT INTO TABLE insert_into5a SELECT 1, 'one' FROM src LIMIT 10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@insert_into5a
-POSTHOOK: query: INSERT INTO TABLE insert_into5a SELECT 1, 'one' FROM src LIMIT 10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@insert_into5a
-POSTHOOK: Lineage: insert_into5a.key SIMPLE []
-POSTHOOK: Lineage: insert_into5a.value SIMPLE []
-PREHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5a
-) t
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into5a
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5a
-) t
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into5a
-#### A masked pattern was here ####
-481928560
-PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5a SELECT * FROM insert_into5a
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into5a
-PREHOOK: Output: default@insert_into5a
-POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5a SELECT * FROM insert_into5a
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into5a
-POSTHOOK: Output: default@insert_into5a
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
-  Stage-4
-  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
-  Stage-2 depends on stages: Stage-0
-  Stage-3
-  Stage-5
-  Stage-6 depends on stages: Stage-5
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: insert_into5a
-            Statistics: Num rows: 10 Data size: 910 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: int), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 10 Data size: 910 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 10 Data size: 910 Basic stats: COMPLETE Column stats: COMPLETE
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.insert_into5a
-              Select Operator
-                expressions: _col0 (type: int), _col1 (type: string)
-                outputColumnNames: key, value
-                Statistics: Num rows: 10 Data size: 910 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                  minReductionHashAggr: 0.99
-                  mode: hash
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    null sort order: 
-                    sort order: 
-                    Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                    value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-7
-    Conditional Operator
-
-  Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.insert_into5a
-
-  Stage: Stage-2
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into5a
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.insert_into5a
-
-  Stage: Stage-5
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.insert_into5a
-
-  Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-PREHOOK: query: INSERT INTO TABLE insert_into5a SELECT * FROM insert_into5a
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into5a
-PREHOOK: Output: default@insert_into5a
-POSTHOOK: query: INSERT INTO TABLE insert_into5a SELECT * FROM insert_into5a
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into5a
-POSTHOOK: Output: default@insert_into5a
-POSTHOOK: Lineage: insert_into5a.key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ]
-POSTHOOK: Lineage: insert_into5a.value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ]
-PREHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5a
-) t
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into5a
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5a
-) t
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into5a
-#### A masked pattern was here ####
-963857120
-PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5b PARTITION (ds='1') 
-  SELECT * FROM insert_into5a
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into5a
-PREHOOK: Output: default@insert_into5b@ds=1
-POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5b PARTITION (ds='1') 
-  SELECT * FROM insert_into5a
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into5a
-POSTHOOK: Output: default@insert_into5b@ds=1
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
-  Stage-4
-  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
-  Stage-2 depends on stages: Stage-0
-  Stage-3
-  Stage-5
-  Stage-6 depends on stages: Stage-5
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: insert_into5a
-            Statistics: Num rows: 20 Data size: 1820 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: int), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 20 Data size: 1820 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 20 Data size: 1820 Basic stats: COMPLETE Column stats: COMPLETE
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.insert_into5b
-              Select Operator
-                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string)
-                outputColumnNames: key, value, ds
-                Statistics: Num rows: 20 Data size: 3520 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                  keys: ds (type: string)
-                  minReductionHashAggr: 0.99
-                  mode: hash
-                  outputColumnNames: _col0, _col1, _col2
-                  Statistics: Num rows: 1 Data size: 949 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string)
-                    null sort order: z
-                    sort order: +
-                    Map-reduce partition columns: _col0 (type: string)
-                    Statistics: Num rows: 1 Data size: 949 Basic stats: COMPLETE Column stats: COMPLETE
-                    value expressions: _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          keys: KEY._col0 (type: string)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 1 Data size: 965 Basic stats: COMPLETE Column stats: COMPLETE
-          Select Operator
-            expressions: _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col0 (type: string)
-            outputColumnNames: _col0, _col1, _col2
-            Statistics: Num rows: 1 Data size: 965 Basic stats: COMPLETE Column stats: COMPLETE
-            File Output Operator
-              compressed: false
-              Statistics: Num rows: 1 Data size: 965 Basic stats: COMPLETE Column stats: COMPLETE
-              table:
-                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-7
-    Conditional Operator
-
-  Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 1
-          replace: false
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.insert_into5b
-
-  Stage: Stage-2
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into5b
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.insert_into5b
-
-  Stage: Stage-5
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.insert_into5b
-
-  Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-PREHOOK: query: INSERT INTO TABLE insert_into5b PARTITION (ds='1') SELECT * FROM insert_into5a
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into5a
-PREHOOK: Output: default@insert_into5b@ds=1
-POSTHOOK: query: INSERT INTO TABLE insert_into5b PARTITION (ds='1') SELECT * FROM insert_into5a
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into5a
-POSTHOOK: Output: default@insert_into5b@ds=1
-POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ]
-POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ]
-PREHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5b
-) t
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into5b
-PREHOOK: Input: default@insert_into5b@ds=1
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5b
-) t
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into5b
-POSTHOOK: Input: default@insert_into5b@ds=1
-#### A masked pattern was here ####
--18626052920
-PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5b PARTITION (ds='1')
-  SELECT key, value FROM insert_into5b
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into5b
-PREHOOK: Input: default@insert_into5b@ds=1
-PREHOOK: Output: default@insert_into5b@ds=1
-POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5b PARTITION (ds='1')
-  SELECT key, value FROM insert_into5b
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into5b
-POSTHOOK: Input: default@insert_into5b@ds=1
-POSTHOOK: Output: default@insert_into5b@ds=1
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
-  Stage-4
-  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
-  Stage-2 depends on stages: Stage-0
-  Stage-3
-  Stage-5
-  Stage-6 depends on stages: Stage-5
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: insert_into5b
-            Statistics: Num rows: 20 Data size: 1820 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: int), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 20 Data size: 1820 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 20 Data size: 1820 Basic stats: COMPLETE Column stats: COMPLETE
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.insert_into5b
-              Select Operator
-                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string)
-                outputColumnNames: key, value, ds
-                Statistics: Num rows: 20 Data size: 3520 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                  keys: ds (type: string)
-                  minReductionHashAggr: 0.99
-                  mode: hash
-                  outputColumnNames: _col0, _col1, _col2
-                  Statistics: Num rows: 1 Data size: 949 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string)
-                    null sort order: z
-                    sort order: +
-                    Map-reduce partition columns: _col0 (type: string)
-                    Statistics: Num rows: 1 Data size: 949 Basic stats: COMPLETE Column stats: COMPLETE
-                    value expressions: _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          keys: KEY._col0 (type: string)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 1 Data size: 965 Basic stats: COMPLETE Column stats: COMPLETE
-          Select Operator
-            expressions: _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col0 (type: string)
-            outputColumnNames: _col0, _col1, _col2
-            Statistics: Num rows: 1 Data size: 965 Basic stats: COMPLETE Column stats: COMPLETE
-            File Output Operator
-              compressed: false
-              Statistics: Num rows: 1 Data size: 965 Basic stats: COMPLETE Column stats: COMPLETE
-              table:
-                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-7
-    Conditional Operator
-
-  Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 1
-          replace: false
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.insert_into5b
-
-  Stage: Stage-2
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into5b
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.insert_into5b
-
-  Stage: Stage-5
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.insert_into5b
-
-  Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-PREHOOK: query: INSERT INTO TABLE insert_into5b PARTITION (ds='1') 
-  SELECT key, value FROM insert_into5b
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into5b
-PREHOOK: Input: default@insert_into5b@ds=1
-PREHOOK: Output: default@insert_into5b@ds=1
-POSTHOOK: query: INSERT INTO TABLE insert_into5b PARTITION (ds='1') 
-  SELECT key, value FROM insert_into5b
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into5b
-POSTHOOK: Input: default@insert_into5b@ds=1
-POSTHOOK: Output: default@insert_into5b@ds=1
-POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:key, type:int, comment:null), ]
-POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:value, type:string, comment:null), ]
-PREHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5b
-) t
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into5b
-PREHOOK: Input: default@insert_into5b@ds=1
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5b
-) t
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into5b
-POSTHOOK: Input: default@insert_into5b@ds=1
-#### A masked pattern was here ####
--37252105840
-PREHOOK: query: DROP TABLE insert_into5a
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@insert_into5a
-PREHOOK: Output: default@insert_into5a
-POSTHOOK: query: DROP TABLE insert_into5a
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@insert_into5a
-POSTHOOK: Output: default@insert_into5a
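
Note: the deleted golden file above records the output of insert_into5.q, and its core pattern can be read back out of the PREHOOK/POSTHOOK query entries. A minimal sketch of that pattern follows (reconstructed from the output above, so it is illustrative rather than the verbatim .q file):

    -- Reconstructed sketch of the insert_into5.q pattern (not the verbatim test file).
    CREATE TABLE insert_into5a (key int, value string);
    CREATE TABLE insert_into5b (key int, value string) PARTITIONED BY (ds string);

    -- INSERT INTO appends rather than overwrites, so re-running the
    -- self-insert doubles the row count (10 -> 20 rows above).
    INSERT INTO TABLE insert_into5a SELECT 1, 'one' FROM src LIMIT 10;
    INSERT INTO TABLE insert_into5a SELECT * FROM insert_into5a;

    -- Checksum idiom used throughout these tests: TRANSFORM serializes each
    -- row through 'tr' (tabs replaced by '_'), and SUM(HASH(c)) turns the
    -- whole table into a single order-independent value to compare.
    SELECT SUM(HASH(c)) FROM (
        SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5a
    ) t;
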
diff --git a/ql/src/test/results/clientpositive/insert_into6.q.out b/ql/src/test/results/clientpositive/insert_into6.q.out
deleted file mode 100644
index 2c6cab5..0000000
--- a/ql/src/test/results/clientpositive/insert_into6.q.out
+++ /dev/null
@@ -1,383 +0,0 @@
-PREHOOK: query: DROP TABLE insert_into6a
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE insert_into6a
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE insert_into6b
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE insert_into6b
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE insert_into6a (key int, value string) PARTITIONED BY (ds string)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@insert_into6a
-POSTHOOK: query: CREATE TABLE insert_into6a (key int, value string) PARTITIONED BY (ds string)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@insert_into6a
-PREHOOK: query: CREATE TABLE insert_into6b (key int, value string) PARTITIONED BY (ds string)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@insert_into6b
-POSTHOOK: query: CREATE TABLE insert_into6b (key int, value string) PARTITIONED BY (ds string)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@insert_into6b
-PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into6a PARTITION (ds='1') 
-    SELECT * FROM src LIMIT 150
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@insert_into6a@ds=1
-POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into6a PARTITION (ds='1') 
-    SELECT * FROM src LIMIT 150
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@insert_into6a@ds=1
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0, Stage-3
-  Stage-3 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-              Limit
-                Number of rows: 150
-                Statistics: Num rows: 150 Data size: 26700 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  null sort order: 
-                  sort order: 
-                  Statistics: Num rows: 150 Data size: 26700 Basic stats: COMPLETE Column stats: COMPLETE
-                  TopN Hash Memory Usage: 0.1
-                  value expressions: _col0 (type: string), _col1 (type: string)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 150 Data size: 26700 Basic stats: COMPLETE Column stats: COMPLETE
-          Limit
-            Number of rows: 150
-            Statistics: Num rows: 150 Data size: 26700 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.insert_into6a
-              Select Operator
-                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string)
-                outputColumnNames: key, value, ds
-                Statistics: Num rows: 150 Data size: 27000 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                  keys: ds (type: string)
-                  minReductionHashAggr: 0.99
-                  mode: hash
-                  outputColumnNames: _col0, _col1, _col2
-                  Statistics: Num rows: 1 Data size: 949 Basic stats: COMPLETE Column stats: COMPLETE
-                  File Output Operator
-                    compressed: false
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 1
-          replace: false
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.insert_into6a
-
-  Stage: Stage-2
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into6a
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: string)
-              null sort order: z
-              sort order: +
-              Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 1 Data size: 949 Basic stats: COMPLETE Column stats: COMPLETE
-              value expressions: _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          keys: KEY._col0 (type: string)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 1 Data size: 965 Basic stats: COMPLETE Column stats: COMPLETE
-          Select Operator
-            expressions: _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col0 (type: string)
-            outputColumnNames: _col0, _col1, _col2
-            Statistics: Num rows: 1 Data size: 965 Basic stats: COMPLETE Column stats: COMPLETE
-            File Output Operator
-              compressed: false
-              Statistics: Num rows: 1 Data size: 965 Basic stats: COMPLETE Column stats: COMPLETE
-              table:
-                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-PREHOOK: query: INSERT INTO TABLE insert_into6a PARTITION (ds='1') SELECT * FROM src LIMIT 150
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@insert_into6a@ds=1
-POSTHOOK: query: INSERT INTO TABLE insert_into6a PARTITION (ds='1') SELECT * FROM src LIMIT 150
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@insert_into6a@ds=1
-POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: INSERT INTO TABLE insert_into6a PARTITION (ds='2') SELECT * FROM src LIMIT 100
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@insert_into6a@ds=2
-POSTHOOK: query: INSERT INTO TABLE insert_into6a PARTITION (ds='2') SELECT * FROM src LIMIT 100
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@insert_into6a@ds=2
-POSTHOOK: Lineage: insert_into6a PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: insert_into6a PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into6a
-) t
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into6a
-PREHOOK: Input: default@insert_into6a@ds=1
-PREHOOK: Input: default@insert_into6a@ds=2
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into6a
-) t
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into6a
-POSTHOOK: Input: default@insert_into6a@ds=1
-POSTHOOK: Input: default@insert_into6a@ds=2
-#### A masked pattern was here ####
--35226404960
-PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into6b PARTITION (ds) 
-    SELECT * FROM insert_into6a
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into6a
-PREHOOK: Input: default@insert_into6a@ds=1
-PREHOOK: Input: default@insert_into6a@ds=2
-PREHOOK: Output: default@insert_into6b
-POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into6b PARTITION (ds) 
-    SELECT * FROM insert_into6a
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into6a
-POSTHOOK: Input: default@insert_into6a@ds=1
-POSTHOOK: Input: default@insert_into6a@ds=2
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
-  Stage-4
-  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
-  Stage-2 depends on stages: Stage-0
-  Stage-3
-  Stage-5
-  Stage-6 depends on stages: Stage-5
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: insert_into6a
-            Statistics: Num rows: 250 Data size: 69750 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: int), value (type: string), ds (type: string)
-              outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 250 Data size: 69750 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 250 Data size: 69750 Basic stats: COMPLETE Column stats: COMPLETE
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.insert_into6b
-              Select Operator
-                expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string)
-                outputColumnNames: key, value, ds
-                Statistics: Num rows: 250 Data size: 69750 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                  keys: ds (type: string)
-                  minReductionHashAggr: 0.99
-                  mode: hash
-                  outputColumnNames: _col0, _col1, _col2
-                  Statistics: Num rows: 2 Data size: 2096 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string)
-                    null sort order: z
-                    sort order: +
-                    Map-reduce partition columns: _col0 (type: string)
-                    Statistics: Num rows: 2 Data size: 2096 Basic stats: COMPLETE Column stats: COMPLETE
-                    value expressions: _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          keys: KEY._col0 (type: string)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 2 Data size: 2128 Basic stats: COMPLETE Column stats: COMPLETE
-          Select Operator
-            expressions: _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col0 (type: string)
-            outputColumnNames: _col0, _col1, _col2
-            Statistics: Num rows: 2 Data size: 2128 Basic stats: COMPLETE Column stats: COMPLETE
-            File Output Operator
-              compressed: false
-              Statistics: Num rows: 2 Data size: 2128 Basic stats: COMPLETE Column stats: COMPLETE
-              table:
-                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-7
-    Conditional Operator
-
-  Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 
-          replace: false
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.insert_into6b
-
-  Stage: Stage-2
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into6b
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.insert_into6b
-
-  Stage: Stage-5
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.insert_into6b
-
-  Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-PREHOOK: query: INSERT INTO TABLE insert_into6b PARTITION (ds) SELECT * FROM insert_into6a
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into6a
-PREHOOK: Input: default@insert_into6a@ds=1
-PREHOOK: Input: default@insert_into6a@ds=2
-PREHOOK: Output: default@insert_into6b
-POSTHOOK: query: INSERT INTO TABLE insert_into6b PARTITION (ds) SELECT * FROM insert_into6a
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into6a
-POSTHOOK: Input: default@insert_into6a@ds=1
-POSTHOOK: Input: default@insert_into6a@ds=2
-POSTHOOK: Output: default@insert_into6b@ds=1
-POSTHOOK: Output: default@insert_into6b@ds=2
-POSTHOOK: Lineage: insert_into6b PARTITION(ds=1).key SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:key, type:int, comment:null), ]
-POSTHOOK: Lineage: insert_into6b PARTITION(ds=1).value SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:value, type:string, comment:null), ]
-POSTHOOK: Lineage: insert_into6b PARTITION(ds=2).key SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:key, type:int, comment:null), ]
-POSTHOOK: Lineage: insert_into6b PARTITION(ds=2).value SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:value, type:string, comment:null), ]
-PREHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into6b
-) t
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into6b
-PREHOOK: Input: default@insert_into6b@ds=1
-PREHOOK: Input: default@insert_into6b@ds=2
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
-    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into6b
-) t
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into6b
-POSTHOOK: Input: default@insert_into6b@ds=1
-POSTHOOK: Input: default@insert_into6b@ds=2
-#### A masked pattern was here ####
--35226404960
-PREHOOK: query: SHOW PARTITIONS insert_into6b
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@insert_into6b
-POSTHOOK: query: SHOW PARTITIONS insert_into6b
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@insert_into6b
-ds=1
-ds=2
-PREHOOK: query: DROP TABLE insert_into6a
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@insert_into6a
-PREHOOK: Output: default@insert_into6a
-POSTHOOK: query: DROP TABLE insert_into6a
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@insert_into6a
-POSTHOOK: Output: default@insert_into6a
-PREHOOK: query: DROP TABLE insert_into6b
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@insert_into6b
-PREHOOK: Output: default@insert_into6b
-POSTHOOK: query: DROP TABLE insert_into6b
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@insert_into6b
-POSTHOOK: Output: default@insert_into6b
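
Note: insert_into6.q, deleted above, exercises dynamic-partition inserts. The statements below are taken directly from the PREHOOK/POSTHOOK entries; the SET line is an assumption (nonstrict mode is required for a fully dynamic PARTITION clause, but the setting itself is not visible in this diff):

    -- Reconstructed sketch of the insert_into6.q dynamic-partition pattern.
    SET hive.exec.dynamic.partition.mode=nonstrict;  -- assumed; not shown in the diff

    CREATE TABLE insert_into6a (key int, value string) PARTITIONED BY (ds string);
    CREATE TABLE insert_into6b (key int, value string) PARTITIONED BY (ds string);

    INSERT INTO TABLE insert_into6a PARTITION (ds='1') SELECT * FROM src LIMIT 150;
    INSERT INTO TABLE insert_into6a PARTITION (ds='2') SELECT * FROM src LIMIT 100;

    -- PARTITION (ds) with no value: ds is taken from the last SELECT column,
    -- so one statement populates both ds=1 and ds=2 in insert_into6b.
    INSERT INTO TABLE insert_into6b PARTITION (ds) SELECT * FROM insert_into6a;
    SHOW PARTITIONS insert_into6b;   -- expect ds=1 and ds=2, as verified above
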
diff --git a/ql/src/test/results/clientpositive/autoColumnStats_6.q.out b/ql/src/test/results/clientpositive/llap/autoColumnStats_6.q.out
similarity index 71%
rename from ql/src/test/results/clientpositive/autoColumnStats_6.q.out
rename to ql/src/test/results/clientpositive/llap/autoColumnStats_6.q.out
index ff708cb..b674164 100644
--- a/ql/src/test/results/clientpositive/autoColumnStats_6.q.out
+++ b/ql/src/test/results/clientpositive/llap/autoColumnStats_6.q.out
@@ -35,83 +35,87 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: UDFToInteger(key) (type: int), value (type: string), CAST( (hash(key) pmod 10) AS STRING) (type: string), CAST( (hash(value) pmod 10) AS STRING) (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 500 Data size: 231500 Basic stats: COMPLETE Column stats: COMPLETE
-              Select Operator
-                expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), _col2 (type: string), _col3 (type: string)
-                outputColumnNames: key, value, one, two, three
-                Statistics: Num rows: 500 Data size: 274000 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                  keys: one (type: string), two (type: string), three (type: string)
-                  minReductionHashAggr: 0.99
-                  mode: hash
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+        Reducer 3 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: UDFToInteger(key) (type: int), value (type: string), CAST( (hash(key) pmod 10) AS STRING) (type: string), CAST( (hash(value) pmod 10) AS STRING) (type: string)
+                    outputColumnNames: _col0, _col1, _col2, _col3
+                    Statistics: Num rows: 500 Data size: 231500 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string), _col2 (type: string), _col3 (type: string)
+                      outputColumnNames: key, value, one, two, three
+                      Statistics: Num rows: 500 Data size: 274000 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                        keys: one (type: string), two (type: string), three (type: string)
+                        minReductionHashAggr: 0.0
+                        mode: hash
+                        outputColumnNames: _col0, _col1, _col2, _col3, _col4
+                        Statistics: Num rows: 500 Data size: 658500 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                          null sort order: zzz
+                          sort order: +++
+                          Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                          Statistics: Num rows: 500 Data size: 658500 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col3 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col4 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+                    Reduce Output Operator
+                      key expressions: _col2 (type: string), _col3 (type: string)
+                      null sort order: aa
+                      sort order: ++
+                      Map-reduce partition columns: _col2 (type: string), _col3 (type: string)
+                      Statistics: Num rows: 500 Data size: 231500 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: int), _col1 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4
+                Statistics: Num rows: 500 Data size: 666500 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col3 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col4 (type: struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col0 (type: string), _col1 (type: string), _col2 (type: string)
                   outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                  Statistics: Num rows: 250 Data size: 329250 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
-                    null sort order: zzz
-                    sort order: +++
-                    Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
-                    Statistics: Num rows: 250 Data size: 329250 Basic stats: COMPLETE Column stats: COMPLETE
-                    value expressions: _col3 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col4 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 250 Data size: 333250 Basic stats: COMPLETE Column stats: COMPLETE
-          Select Operator
-            expressions: _col3 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col4 (type: struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col0 (type: string), _col1 (type: string), _col2 (type: string)
-            outputColumnNames: _col0, _col1, _col2, _col3, _col4
-            Statistics: Num rows: 250 Data size: 333250 Basic stats: COMPLETE Column stats: COMPLETE
-            File Output Operator
-              compressed: false
-              Statistics: Num rows: 250 Data size: 333250 Basic stats: COMPLETE Column stats: COMPLETE
-              table:
-                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  Statistics: Num rows: 500 Data size: 666500 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 500 Data size: 666500 Basic stats: COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+        Reducer 3 
+            Execution mode: vectorized, llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: int), VALUE._col1 (type: string), KEY._col2 (type: string), KEY._col3 (type: string)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                File Output Operator
+                  compressed: false
+                  Dp Sort State: PARTITION_SORTED
+                  Statistics: Num rows: 500 Data size: 231500 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
+                      serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
+                      name: default.orcfile_merge2a
 
   Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col2 (type: string), _col3 (type: string)
-              null sort order: aa
-              sort order: ++
-              Map-reduce partition columns: _col2 (type: string), _col3 (type: string)
-              Statistics: Num rows: 500 Data size: 231500 Basic stats: COMPLETE Column stats: COMPLETE
-              value expressions: _col0 (type: int), _col1 (type: string)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: int), VALUE._col1 (type: string), KEY._col2 (type: string), KEY._col3 (type: string)
-          outputColumnNames: _col0, _col1, _col2, _col3
-          File Output Operator
-            compressed: false
-            Dp Sort State: PARTITION_SORTED
-            Statistics: Num rows: 500 Data size: 231500 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-                serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                name: default.orcfile_merge2a
+    Dependency Collection
 
   Stage: Stage-0
     Move Operator
@@ -209,7 +213,7 @@ POSTHOOK: Lineage: orcfile_merge2a PARTITION(one=1,two=9,three=1).value SIMPLE [
 POSTHOOK: Lineage: orcfile_merge2a PARTITION(one=1,two=9,three=7).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: orcfile_merge2a PARTITION(one=1,two=9,three=7).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 Found 1 items
-#### A masked pattern was here ####
+-rw-rw-rw-   3 ### USER ### ### GROUP ###        351 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: SELECT SUM(HASH(c)) FROM (
     SELECT TRANSFORM(*) USING 'tr \t _' AS (c)
     FROM orcfile_merge2a
@@ -236,7 +240,7 @@ PREHOOK: Input: default@orcfile_merge2a@one=1/two=8/three=0
 PREHOOK: Input: default@orcfile_merge2a@one=1/two=8/three=6
 PREHOOK: Input: default@orcfile_merge2a@one=1/two=9/three=1
 PREHOOK: Input: default@orcfile_merge2a@one=1/two=9/three=7
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
     SELECT TRANSFORM(*) USING 'tr \t _' AS (c)
     FROM orcfile_merge2a
@@ -263,7 +267,7 @@ POSTHOOK: Input: default@orcfile_merge2a@one=1/two=8/three=0
 POSTHOOK: Input: default@orcfile_merge2a@one=1/two=8/three=6
 POSTHOOK: Input: default@orcfile_merge2a@one=1/two=9/three=1
 POSTHOOK: Input: default@orcfile_merge2a@one=1/two=9/three=7
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 -4209012844
 PREHOOK: query: SELECT SUM(HASH(c)) FROM (
     SELECT TRANSFORM(key, value, '1', PMOD(HASH(key), 10), 
@@ -272,7 +276,7 @@ PREHOOK: query: SELECT SUM(HASH(c)) FROM (
 ) t
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
     SELECT TRANSFORM(key, value, '1', PMOD(HASH(key), 10), 
         PMOD(HASH(value), 10)) USING 'tr \t _' AS (c)
@@ -280,7 +284,7 @@ POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
 ) t
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 -4209012844
 PREHOOK: query: DROP TABLE orcfile_merge2a
 PREHOOK: type: DROPTABLE
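
Note: the autoColumnStats_6.q.out rename above shows why these golden files are regenerated rather than simply moved: under the MiniLlap drivers the same statement compiles to a single Tez DAG ("Map 1", "Reducer 2", "Reducer 3", Execution mode: llap) instead of chained "Map Reduce" stages, and the HDFS path masking changes as well. A rough reconstruction of the statement behind that plan, read off the operator tree (a sketch under stated assumptions; the INSERT OVERWRITE keyword and the SET line are not visible in this diff):

    -- one='1' is static; two and three are dynamic partition columns,
    -- matching the PARTITION(one=1,two=...,three=...) lineage entries above.
    SET hive.execution.engine=tez;  -- illustrative; the mini driver configures this itself

    INSERT OVERWRITE TABLE orcfile_merge2a PARTITION (one='1', two, three)
      SELECT key, value, PMOD(HASH(key), 10), PMOD(HASH(value), 10)
      FROM src;
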
diff --git a/ql/src/test/results/clientpositive/autogen_colalias.q.out b/ql/src/test/results/clientpositive/llap/autogen_colalias.q.out
similarity index 100%
rename from ql/src/test/results/clientpositive/autogen_colalias.q.out
rename to ql/src/test/results/clientpositive/llap/autogen_colalias.q.out
diff --git a/ql/src/test/results/clientpositive/binary_output_format.q.out b/ql/src/test/results/clientpositive/llap/binary_output_format.q.out
similarity index 50%
rename from ql/src/test/results/clientpositive/binary_output_format.q.out
rename to ql/src/test/results/clientpositive/llap/binary_output_format.q.out
index 760d863..0b7a75e 100644
--- a/ql/src/test/results/clientpositive/binary_output_format.q.out
+++ b/ql/src/test/results/clientpositive/llap/binary_output_format.q.out
@@ -54,170 +54,171 @@ POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1_n109
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
-  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
-  Stage-4
-  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
-  Stage-2 depends on stages: Stage-0
-  Stage-3
-  Stage-5
-  Stage-6 depends on stages: Stage-5
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
 
 STAGE PLANS:
   Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-              Transform Operator
-                command: cat
-                output info:
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                  GatherStats: false
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          properties:
+                            bucketing_version -1
+                            columns _col0
+                            columns.types string
+                            field.delim 9
+                            serialization.format 9
+                            serialization.last.column.takes.rest true
+                            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                      File Output Operator
+                        bucketingVersion: 2
+                        compressed: false
+                        GlobalTableId: 1
+                        directory: hdfs://### HDFS PATH ###
+                        NumFilesPerFileSink: 1
+                        Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                        Stats Publishing Key Prefix: hdfs://### HDFS PATH ###
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat
+                            properties:
+                              bucketing_version 2
+                              column.name.delimiter ,
+                              columns mydata
+                              columns.comments 
+                              columns.types string
+#### A masked pattern was here ####
+                              location hdfs://### HDFS PATH ###
+                              name default.dest1_n109
+                              serialization.format 1
+                              serialization.last.column.takes.rest true
+                              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: default.dest1_n109
+                        TotalFiles: 1
+                        GatherStats: true
+                        MultiFileSpray: false
+                      Select Operator
+                        expressions: _col0 (type: string)
+                        outputColumnNames: mydata
+                        Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                        Group By Operator
+                          aggregations: compute_stats(mydata, 'hll')
+                          minReductionHashAggr: 0.99
+                          mode: hash
+                          outputColumnNames: _col0
+                          Statistics: Num rows: 1 Data size: 440 Basic stats: COMPLETE Column stats: COMPLETE
+                          Reduce Output Operator
+                            bucketingVersion: 2
+                            null sort order: 
+                            numBuckets: -1
+                            sort order: 
+                            Statistics: Num rows: 1 Data size: 440 Basic stats: COMPLETE Column stats: COMPLETE
+                            tag: -1
+                            value expressions: _col0 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+                            auto parallelism: false
+            Path -> Alias:
+              hdfs://### HDFS PATH ### [src]
+            Path -> Partition:
+              hdfs://### HDFS PATH ### 
+                Partition
+                  base file name: src
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    bucket_count -1
+                    bucketing_version 2
+                    column.name.delimiter ,
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    location hdfs://### HDFS PATH ###
+                    name default.src
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      bucketing_version -1
-                      columns _col0
-                      columns.types string
-                      field.delim 9
-                      serialization.format 9
-                      serialization.last.column.takes.rest true
+                      bucketing_version 2
+                      column.name.delimiter ,
+                      columns key,value
+                      columns.comments 'default','default'
+                      columns.types string:string
+#### A masked pattern was here ####
+                      location hdfs://### HDFS PATH ###
+                      name default.src
+                      serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                    name: default.src
+                  name: default.src
+            Truncated Path -> Alias:
+              /src [src]
+        Reducer 2 
+            Execution mode: llap
+            Needs Tagging: false
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 440 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   bucketingVersion: 2
                   compressed: false
-                  GlobalTableId: 1
-#### A masked pattern was here ####
+                  GlobalTableId: 0
+                  directory: hdfs://### HDFS PATH ###
                   NumFilesPerFileSink: 1
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-#### A masked pattern was here ####
+                  Statistics: Num rows: 1 Data size: 440 Basic stats: COMPLETE Column stats: COMPLETE
+                  Stats Publishing Key Prefix: hdfs://### HDFS PATH ###
                   table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       properties:
-                        bucketing_version 2
-                        column.name.delimiter ,
-                        columns mydata
-                        columns.comments 
-                        columns.types string
-#### A masked pattern was here ####
-                        name default.dest1_n109
+                        bucketing_version -1
+                        columns _col0
+                        columns.types struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>
+                        escape.delim \
+                        hive.serialization.extend.additional.nesting.levels true
+                        serialization.escape.crlf true
                         serialization.format 1
-                        serialization.last.column.takes.rest true
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.dest1_n109
                   TotalFiles: 1
-                  GatherStats: true
+                  GatherStats: false
                   MultiFileSpray: false
-                Select Operator
-                  expressions: _col0 (type: string)
-                  outputColumnNames: mydata
-                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-                  Group By Operator
-                    aggregations: compute_stats(mydata, 'hll')
-                    minReductionHashAggr: 0.99
-                    mode: hash
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1 Data size: 440 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      bucketingVersion: 2
-                      null sort order: 
-                      numBuckets: -1
-                      sort order: 
-                      Statistics: Num rows: 1 Data size: 440 Basic stats: COMPLETE Column stats: COMPLETE
-                      tag: -1
-                      value expressions: _col0 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-                      auto parallelism: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              bucket_count -1
-              bucketing_version 2
-              column.name.delimiter ,
-              columns key,value
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucketing_version 2
-                column.name.delimiter ,
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-      Truncated Path -> Alias:
-        /src [src]
-      Needs Tagging: false
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0)
-          mode: mergepartial
-          outputColumnNames: _col0
-          Statistics: Num rows: 1 Data size: 440 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            bucketingVersion: 2
-            compressed: false
-            GlobalTableId: 0
-#### A masked pattern was here ####
-            NumFilesPerFileSink: 1
-            Statistics: Num rows: 1 Data size: 440 Basic stats: COMPLETE Column stats: COMPLETE
-#### A masked pattern was here ####
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                properties:
-                  bucketing_version -1
-                  columns _col0
-                  columns.types struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>
-                  escape.delim \
-                  hive.serialization.extend.additional.nesting.levels true
-                  serialization.escape.crlf true
-                  serialization.format 1
-                  serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-            TotalFiles: 1
-            GatherStats: false
-            MultiFileSpray: false
 
-  Stage: Stage-7
-    Conditional Operator
-
-  Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
+  Stage: Stage-2
+    Dependency Collection
 
   Stage: Stage-0
     Move Operator
       tables:
           replace: true
-#### A masked pattern was here ####
+          source: hdfs://### HDFS PATH ###
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat
@@ -228,6 +229,7 @@ STAGE PLANS:
                 columns.comments 
                 columns.types string
 #### A masked pattern was here ####
+                location hdfs://### HDFS PATH ###
                 name default.dest1_n109
                 serialization.format 1
                 serialization.last.column.takes.rest true
@@ -235,162 +237,16 @@ STAGE PLANS:
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1_n109
 
-  Stage: Stage-2
+  Stage: Stage-3
     Stats Work
       Basic Stats Work:
-#### A masked pattern was here ####
+          Stats Aggregation Key Prefix: hdfs://### HDFS PATH ###
       Column Stats Desc:
           Columns: mydata
           Column Types: string
           Table: default.dest1_n109
           Is Table Level Stats: true
 
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            GatherStats: false
-            File Output Operator
-              bucketingVersion: 2
-              compressed: false
-              GlobalTableId: 0
-#### A masked pattern was here ####
-              NumFilesPerFileSink: 1
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat
-                  properties:
-                    bucketing_version 2
-                    column.name.delimiter ,
-                    columns mydata
-                    columns.comments 
-                    columns.types string
-#### A masked pattern was here ####
-                    name default.dest1_n109
-                    serialization.format 1
-                    serialization.last.column.takes.rest true
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.dest1_n109
-              TotalFiles: 1
-              GatherStats: false
-              MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: -ext-10002
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat
-            properties:
-              bucketing_version 2
-              column.name.delimiter ,
-              columns mydata
-              columns.comments 
-              columns.types string
-#### A masked pattern was here ####
-              name default.dest1_n109
-              serialization.format 1
-              serialization.last.column.takes.rest true
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat
-              properties:
-                bucketing_version 2
-                column.name.delimiter ,
-                columns mydata
-                columns.comments 
-                columns.types string
-#### A masked pattern was here ####
-                name default.dest1_n109
-                serialization.format 1
-                serialization.last.column.takes.rest true
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.dest1_n109
-            name: default.dest1_n109
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-5
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            GatherStats: false
-            File Output Operator
-              bucketingVersion: 2
-              compressed: false
-              GlobalTableId: 0
-#### A masked pattern was here ####
-              NumFilesPerFileSink: 1
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat
-                  properties:
-                    bucketing_version 2
-                    column.name.delimiter ,
-                    columns mydata
-                    columns.comments 
-                    columns.types string
-#### A masked pattern was here ####
-                    name default.dest1_n109
-                    serialization.format 1
-                    serialization.last.column.takes.rest true
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.dest1_n109
-              TotalFiles: 1
-              GatherStats: false
-              MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: -ext-10002
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat
-            properties:
-              bucketing_version 2
-              column.name.delimiter ,
-              columns mydata
-              columns.comments 
-              columns.types string
-#### A masked pattern was here ####
-              name default.dest1_n109
-              serialization.format 1
-              serialization.last.column.takes.rest true
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat
-              properties:
-                bucketing_version 2
-                column.name.delimiter ,
-                columns mydata
-                columns.comments 
-                columns.types string
-#### A masked pattern was here ####
-                name default.dest1_n109
-                serialization.format 1
-                serialization.last.column.takes.rest true
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.dest1_n109
-            name: default.dest1_n109
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
 PREHOOK: query: INSERT OVERWRITE TABLE dest1_n109
 SELECT TRANSFORM(*)
   USING 'cat'
@@ -423,11 +279,11 @@ POSTHOOK: Lineage: dest1_n109.mydata SCRIPT [(src)src.FieldSchema(name:key, type
 PREHOOK: query: SELECT * FROM dest1_n109
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1_n109
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM dest1_n109
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1_n109
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 238	val_238
 86	val_86
 311	val_311
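
For readers unfamiliar with this test: binary_output_format.q exercises writing script-transform output into a table whose writer is HiveBinaryOutputFormat. A minimal sketch of that table layout follows; the table name and SerDe properties are taken from the plan above, but treat the DDL as illustrative rather than a verbatim copy of the .q file:

    CREATE TABLE dest1_n109 (mydata STRING)
      ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
      WITH SERDEPROPERTIES ('serialization.last.column.takes.rest' = 'true')
      STORED AS
        INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
        OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat';

The INSERT OVERWRITE ... SELECT TRANSFORM(*) USING 'cat' statement shown above then streams src through the identity command into this binary-format table.
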
diff --git a/ql/src/test/results/clientpositive/llap/create_genericudaf.q.out b/ql/src/test/results/clientpositive/llap/create_genericudaf.q.out
new file mode 100644
index 0000000..a87f04a
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/create_genericudaf.q.out
@@ -0,0 +1,114 @@
+PREHOOK: query: EXPLAIN
+CREATE TEMPORARY FUNCTION test_avg AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage'
+PREHOOK: type: CREATEFUNCTION
+PREHOOK: Output: test_avg
+POSTHOOK: query: EXPLAIN
+CREATE TEMPORARY FUNCTION test_avg AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage'
+POSTHOOK: type: CREATEFUNCTION
+POSTHOOK: Output: test_avg
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Create Function
+      class: org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage
+      name: test_avg
+      temporary: true
+
+PREHOOK: query: CREATE TEMPORARY FUNCTION test_avg AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage'
+PREHOOK: type: CREATEFUNCTION
+PREHOOK: Output: test_avg
+POSTHOOK: query: CREATE TEMPORARY FUNCTION test_avg AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage'
+POSTHOOK: type: CREATEFUNCTION
+POSTHOOK: Output: test_avg
+PREHOOK: query: EXPLAIN
+SELECT
+    test_avg(1),
+    test_avg(substr(value,5))
+FROM src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: EXPLAIN
+SELECT
+    test_avg(1),
+    test_avg(substr(value,5))
+FROM src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: substr(value, 5) (type: string)
+                    outputColumnNames: _col1
+                    Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
+                    Group By Operator
+                      aggregations: test_avg(1), test_avg(_col1)
+                      minReductionHashAggr: 0.99
+                      mode: hash
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 1 Data size: 332 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        null sort order: 
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 332 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col0 (type: struct<count:bigint,sum:double,input:int>), _col1 (type: struct<count:bigint,sum:double,input:string>)
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: test_avg(VALUE._col0), test_avg(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT
+    test_avg(1),
+    test_avg(substr(value,5))
+FROM src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT
+    test_avg(1),
+    test_avg(substr(value,5))
+FROM src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1.0	260.182
+PREHOOK: query: DROP TEMPORARY FUNCTIOn test_avg
+PREHOOK: type: DROPFUNCTION
+PREHOOK: Output: test_avg
+POSTHOOK: query: DROP TEMPORARY FUNCTIOn test_avg
+POSTHOOK: type: DROPFUNCTION
+POSTHOOK: Output: test_avg
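
As a quick reference, the full lifecycle this golden file exercises is the three statements below, assuming only the standard src(key STRING, value STRING) test table:

    CREATE TEMPORARY FUNCTION test_avg AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage';
    SELECT test_avg(1), test_avg(substr(value, 5)) FROM src;  -- 1.0 and 260.182 on the 500-row src table
    DROP TEMPORARY FUNCTION test_avg;

Temporary functions are session-scoped and touch no metastore state, which is why the EXPLAIN output above is a single Create Function stage with no job.
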
diff --git a/ql/src/test/results/clientpositive/create_udaf.q.out b/ql/src/test/results/clientpositive/llap/create_udaf.q.out
similarity index 86%
rename from ql/src/test/results/clientpositive/create_udaf.q.out
rename to ql/src/test/results/clientpositive/llap/create_udaf.q.out
index 7bfce12..52e5fa6 100644
--- a/ql/src/test/results/clientpositive/create_udaf.q.out
+++ b/ql/src/test/results/clientpositive/llap/create_udaf.q.out
@@ -42,56 +42,56 @@ POSTHOOK: Lineage: dest1_n34.col EXPRESSION [(src)src.FieldSchema(name:value, ty
 PREHOOK: query: SELECT dest1_n34.* FROM dest1_n34
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1_n34
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT dest1_n34.* FROM dest1_n34
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1_n34
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 7
 PREHOOK: query: SELECT test_max(CAST(length(src.value) AS SMALLINT)) FROM src
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT test_max(CAST(length(src.value) AS SMALLINT)) FROM src
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 7
 PREHOOK: query: SELECT test_max(CAST(length(src.value) AS BIGINT)) FROM src
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT test_max(CAST(length(src.value) AS BIGINT)) FROM src
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 7
 PREHOOK: query: SELECT test_max(CAST(length(src.value) AS DOUBLE)) FROM src
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT test_max(CAST(length(src.value) AS DOUBLE)) FROM src
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 7.0
 PREHOOK: query: SELECT test_max(CAST(length(src.value) AS FLOAT)) FROM src
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT test_max(CAST(length(src.value) AS FLOAT)) FROM src
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 7.0
 PREHOOK: query: SELECT test_max(substr(src.value,5)) FROM src
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT test_max(substr(src.value,5)) FROM src
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 98
 PREHOOK: query: DROP TEMPORARY FUNCTION test_max
 PREHOOK: type: DROPFUNCTION
diff --git a/ql/src/test/results/clientpositive/create_view.q.out b/ql/src/test/results/clientpositive/llap/create_view.q.out
similarity index 94%
rename from ql/src/test/results/clientpositive/create_view.q.out
rename to ql/src/test/results/clientpositive/llap/create_view.q.out
index 9a251fc..52b77c7 100644
--- a/ql/src/test/results/clientpositive/create_view.q.out
+++ b/ql/src/test/results/clientpositive/llap/create_view.q.out
@@ -77,11 +77,11 @@ POSTHOOK: type: DROPFUNCTION
 PREHOOK: query: SELECT * FROM src WHERE key=86
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM src WHERE key=86
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 86	val_86
 PREHOOK: query: CREATE VIEW view1 AS SELECT value FROM src WHERE key=86
 PREHOOK: type: CREATEVIEW
@@ -125,35 +125,35 @@ PREHOOK: query: SELECT * from view1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@view1
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * from view1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@view1
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 val_86
 PREHOOK: query: SELECT * from view2 where key=18
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@view2
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * from view2 where key=18
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@view2
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 18	val_18
 18	val_18
 PREHOOK: query: SELECT * from view3
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@view3
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * from view3
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@view3
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 VAL_86
 PREHOOK: query: EXPLAIN
 CREATE VIEW view0(valoo) AS SELECT upper(value) FROM src WHERE key=86
@@ -183,48 +183,32 @@ SELECT * from view2 where key=18
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@view2
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: EXPLAIN
 SELECT * from view2 where key=18
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@view2
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
+  Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            filterExpr: (UDFToDouble(key) = 18.0D) (type: boolean)
-            properties:
-              insideView TRUE
-            Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Filter Operator
-              predicate: (UDFToDouble(key) = 18.0D) (type: boolean)
-              Statistics: Num rows: 250 Data size: 44500 Basic stats: COMPLETE Column stats: COMPLETE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 250 Data size: 44500 Basic stats: COMPLETE Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 250 Data size: 44500 Basic stats: COMPLETE Column stats: COMPLETE
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-      Execution mode: vectorized
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
       Processor Tree:
-        ListSink
+        TableScan
+          alias: src
+          filterExpr: (UDFToDouble(key) = 18.0D) (type: boolean)
+          properties:
+            insideView TRUE
+          Filter Operator
+            predicate: (UDFToDouble(key) = 18.0D) (type: boolean)
+            Select Operator
+              expressions: key (type: string), value (type: string)
+              outputColumnNames: _col0, _col1
+              ListSink
 
 PREHOOK: query: SHOW TABLES 'view%'
 PREHOOK: type: SHOWTABLES
@@ -481,11 +465,11 @@ POSTHOOK: Lineage: table1_n4.key EXPRESSION [(src)src.FieldSchema(name:key, type
 PREHOOK: query: SELECT * FROM table1_n4
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1_n4
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM table1_n4
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1_n4
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 86
 PREHOOK: query: CREATE VIEW view4 AS SELECT * FROM table1_n4
 PREHOOK: type: CREATEVIEW
@@ -502,12 +486,12 @@ PREHOOK: query: SELECT * FROM view4
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1_n4
 PREHOOK: Input: default@view4
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM view4
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1_n4
 POSTHOOK: Input: default@view4
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 86
 PREHOOK: query: DESCRIBE view4
 PREHOOK: type: DESCTABLE
@@ -527,22 +511,22 @@ POSTHOOK: Output: default@table1_n4
 PREHOOK: query: SELECT * FROM table1_n4
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1_n4
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM table1_n4
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1_n4
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 86	NULL
 PREHOOK: query: SELECT * FROM view4
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1_n4
 PREHOOK: Input: default@view4
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM view4
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1_n4
 POSTHOOK: Input: default@view4
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 86
 PREHOOK: query: DESCRIBE table1_n4
 PREHOOK: type: DESCTABLE
@@ -575,19 +559,19 @@ POSTHOOK: Output: database:default
 POSTHOOK: Output: default@view5
 POSTHOOK: Lineage: view5.key1 SIMPLE [(table1_n4)table1_n4.FieldSchema(name:key, type:int, comment:null), ]
 POSTHOOK: Lineage: view5.key2 SIMPLE [(table1_n4)table1_n4.FieldSchema(name:key, type:int, comment:null), ]
-Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
 PREHOOK: query: SELECT * FROM view5
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1_n4
 PREHOOK: Input: default@view4
 PREHOOK: Input: default@view5
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM view5
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1_n4
 POSTHOOK: Input: default@view4
 POSTHOOK: Input: default@view5
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 86	86
 PREHOOK: query: DESCRIBE view5
 PREHOOK: type: DESCTABLE
@@ -641,12 +625,12 @@ PREHOOK: query: SELECT * FROM view7
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@view7
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM view7
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@view7
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 82	val_82
 83	val_83
 83	val_83
@@ -661,12 +645,12 @@ PREHOOK: query: SELECT * FROM view7 ORDER BY key DESC, value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@view7
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM view7 ORDER BY key DESC, value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@view7
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 90	val_90
 90	val_90
 87	val_87
@@ -681,12 +665,12 @@ PREHOOK: query: SELECT * FROM view7 LIMIT 5
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@view7
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM view7 LIMIT 5
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@view7
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 82	val_82
 83	val_83
 83	val_83
@@ -696,12 +680,12 @@ PREHOOK: query: SELECT * FROM view7 LIMIT 20
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@view7
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM view7 LIMIT 20
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@view7
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 82	val_82
 83	val_83
 83	val_83
@@ -780,12 +764,12 @@ PREHOOK: query: SELECT * FROM view8
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1_n4
 PREHOOK: Input: default@view8
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM view8
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1_n4
 POSTHOOK: Input: default@view8
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 bbc
 PREHOOK: query: CREATE TEMPORARY FUNCTION test_max AS
 'org.apache.hadoop.hive.ql.udf.UDAFTestMax'
@@ -855,12 +839,12 @@ PREHOOK: query: SELECT * FROM view9
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@view9
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM view9
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@view9
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 7
 PREHOOK: query: DROP VIEW view9
 PREHOOK: type: DROPVIEW
@@ -930,12 +914,12 @@ PREHOOK: query: SELECT * FROM view9
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@view9
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM view9
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@view9
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 7
 PREHOOK: query: CREATE VIEW view10 AS
 SELECT slurp.* FROM (SELECT * FROM src WHERE key=86) slurp
@@ -996,12 +980,12 @@ PREHOOK: query: SELECT * FROM view10
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@view10
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM view10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@view10
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 86	val_86
 PREHOOK: query: CREATE TEMPORARY FUNCTION test_explode AS
 'org.apache.hadoop.hive.ql.udf.generic.GenericUDTFExplode'
@@ -1071,12 +1055,12 @@ PREHOOK: query: SELECT * FROM view11
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1_n4
 PREHOOK: Input: default@view11
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM view11
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1_n4
 POSTHOOK: Input: default@view11
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1
 2
 3
@@ -1143,26 +1127,26 @@ ORDER BY key ASC, myCol ASC LIMIT 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@view12
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM view12
 ORDER BY key ASC, myCol ASC LIMIT 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@view12
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0	val_0	1
 PREHOOK: query: SELECT * FROM view2 LATERAL VIEW explode(array(1,2,3)) myTable AS myCol
 ORDER BY key ASC, myCol ASC LIMIT 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@view2
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM view2 LATERAL VIEW explode(array(1,2,3)) myTable AS myCol
 ORDER BY key ASC, myCol ASC LIMIT 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@view2
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0	val_0	1
 PREHOOK: query: CREATE VIEW view13 AS
 SELECT s.key
@@ -1225,13 +1209,13 @@ ORDER BY key LIMIT 12
 PREHOOK: type: QUERY
 PREHOOK: Input: default@srcbucket
 PREHOOK: Input: default@view13
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM view13
 ORDER BY key LIMIT 12
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcbucket
 POSTHOOK: Input: default@view13
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1
 8
 8
@@ -1346,13 +1330,13 @@ ORDER BY k1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@view14
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM view14
 ORDER BY k1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@view14
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0	val_0	0	val_0
 0	val_0	0	val_0
 0	val_0	0	val_0
@@ -1445,14 +1429,14 @@ LIMIT 10
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@view15
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM view15
 ORDER BY value_count DESC, key
 LIMIT 10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@view15
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 230	5
 348	5
 401	5
@@ -1525,14 +1509,14 @@ LIMIT 10
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@view16
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM view16
 ORDER BY value
 LIMIT 10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@view16
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 val_0
 val_10
 val_100
@@ -1589,12 +1573,12 @@ PREHOOK: query: select * from view17
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
 PREHOOK: Input: default@view17
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from view17
 POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 POSTHOOK: Input: default@view17
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1
 PREHOOK: query: create view view18 as select v+1 from (select 1 as v) t
 PREHOOK: type: CREATEVIEW
@@ -1611,12 +1595,12 @@ PREHOOK: query: select * from view18
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
 PREHOOK: Input: default@view18
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from view18
 POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 POSTHOOK: Input: default@view18
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 2
 PREHOOK: query: create view if not exists view18 as select "should be ignored"
 PREHOOK: type: CREATEVIEW
diff --git a/ql/src/test/results/clientpositive/gen_udf_example_add10.q.out b/ql/src/test/results/clientpositive/llap/gen_udf_example_add10.q.out
similarity index 52%
rename from ql/src/test/results/clientpositive/gen_udf_example_add10.q.out
rename to ql/src/test/results/clientpositive/llap/gen_udf_example_add10.q.out
index bfe3139..cb7ea6d 100644
--- a/ql/src/test/results/clientpositive/gen_udf_example_add10.q.out
+++ b/ql/src/test/results/clientpositive/llap/gen_udf_example_add10.q.out
@@ -23,48 +23,62 @@ POSTHOOK: Output: default@t1_n102
 PREHOOK: query: explain select example_add10(x) as a,example_add10(y) as b from t1_n102 order by a desc,b limit 10
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t1_n102
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: explain select example_add10(x) as a,example_add10(y) as b from t1_n102 order by a desc,b limit 10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t1_n102
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
   Stage-0 depends on stages: Stage-1
 
 STAGE PLANS:
   Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: t1_n102
-            Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: add10(x) (type: int), add10(y) (type: double)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
-              Reduce Output Operator
-                key expressions: _col0 (type: int), _col1 (type: double)
-                null sort order: zz
-                sort order: -+
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: t1_n102
+                  Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+                  Top N Key Operator
+                    sort order: -+
+                    keys: add10(x) (type: int), add10(y) (type: double)
+                    null sort order: zz
+                    Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+                    top n: 10
+                    Select Operator
+                      expressions: add10(x) (type: int), add10(y) (type: double)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int), _col1 (type: double)
+                        null sort order: zz
+                        sort order: -+
+                        Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+                        TopN Hash Memory Usage: 0.1
+            Execution mode: vectorized
+        Reducer 2 
+            Execution mode: vectorized, llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: double)
+                outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
-                TopN Hash Memory Usage: 0.1
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Select Operator
-          expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: double)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
-          Limit
-            Number of rows: 10
-            Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
-            File Output Operator
-              compressed: false
-              Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
-              table:
-                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                Limit
+                  Number of rows: 10
+                  Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
     Fetch Operator
@@ -75,11 +89,11 @@ STAGE PLANS:
 PREHOOK: query: select example_add10(x) as a,example_add10(y) as b from t1_n102 order by a desc,b limit 10
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t1_n102
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select example_add10(x) as a,example_add10(y) as b from t1_n102 order by a desc,b limit 10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t1_n102
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 18	28.0
 18	38.0
 17	27.0
diff --git a/ql/src/test/results/clientpositive/groupby_bigdata.q.out b/ql/src/test/results/clientpositive/llap/groupby_bigdata.q.out
similarity index 82%
rename from ql/src/test/results/clientpositive/groupby_bigdata.q.out
rename to ql/src/test/results/clientpositive/llap/groupby_bigdata.q.out
index 90ccc8c..9a7ca2f 100644
--- a/ql/src/test/results/clientpositive/groupby_bigdata.q.out
+++ b/ql/src/test/results/clientpositive/llap/groupby_bigdata.q.out
@@ -2,10 +2,10 @@ PREHOOK: query: select count(distinct subq.key) from
 (FROM src MAP src.key USING 'python dumpdata_script.py' AS key WHERE src.key = 10) subq
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select count(distinct subq.key) from
 (FROM src MAP src.key USING 'python dumpdata_script.py' AS key WHERE src.key = 10) subq
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1000022
diff --git a/ql/src/test/results/clientpositive/llap/input14.q.out b/ql/src/test/results/clientpositive/llap/input14.q.out
new file mode 100644
index 0000000..f3e5526
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/input14.q.out
@@ -0,0 +1,252 @@
+PREHOOK: query: CREATE TABLE dest1_n42(key INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest1_n42
+POSTHOOK: query: CREATE TABLE dest1_n42(key INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest1_n42
+PREHOOK: query: EXPLAIN
+FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value)
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1_n42 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1_n42
+POSTHOOK: query: EXPLAIN
+FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value)
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1_n42 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1_n42
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                      Filter Operator
+                        predicate: (_col0 < 100) (type: boolean)
+                        Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: string)
+                          null sort order: a
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: string)
+                          Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: string), _col1 (type: string)
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: UDFToInteger(VALUE._col0) (type: int), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 166 Data size: 15770 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 166 Data size: 15770 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.dest1_n42
+                Select Operator
+                  expressions: _col0 (type: int), _col1 (type: string)
+                  outputColumnNames: key, value
+                  Statistics: Num rows: 166 Data size: 15770 Basic stats: COMPLETE Column stats: COMPLETE
+                  Group By Operator
+                    aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                    minReductionHashAggr: 0.99
+                    mode: hash
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      null sort order: 
+                      sort order: 
+                      Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1_n42
+
+  Stage: Stage-3
+    Stats Work
+      Basic Stats Work:
+      Column Stats Desc:
+          Columns: key, value
+          Column Types: int, string
+          Table: default.dest1_n42
+
+PREHOOK: query: FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value)
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1_n42 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1_n42
+POSTHOOK: query: FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value)
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1_n42 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1_n42
+POSTHOOK: Lineage: dest1_n42.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1_n42.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT dest1_n42.* FROM dest1_n42
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1_n42
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT dest1_n42.* FROM dest1_n42
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1_n42
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+0	val_0
+0	val_0
+0	val_0
+10	val_10
+11	val_11
+12	val_12
+12	val_12
+15	val_15
+15	val_15
+17	val_17
+18	val_18
+18	val_18
+19	val_19
+2	val_2
+20	val_20
+24	val_24
+24	val_24
+26	val_26
+26	val_26
+27	val_27
+28	val_28
+30	val_30
+33	val_33
+34	val_34
+35	val_35
+35	val_35
+35	val_35
+37	val_37
+37	val_37
+4	val_4
+41	val_41
+42	val_42
+42	val_42
+43	val_43
+44	val_44
+47	val_47
+5	val_5
+5	val_5
+5	val_5
+51	val_51
+51	val_51
+53	val_53
+54	val_54
+57	val_57
+58	val_58
+58	val_58
+64	val_64
+65	val_65
+66	val_66
+67	val_67
+67	val_67
+69	val_69
+70	val_70
+70	val_70
+70	val_70
+72	val_72
+72	val_72
+74	val_74
+76	val_76
+76	val_76
+77	val_77
+78	val_78
+8	val_8
+80	val_80
+82	val_82
+83	val_83
+83	val_83
+84	val_84
+84	val_84
+85	val_85
+86	val_86
+87	val_87
+9	val_9
+90	val_90
+90	val_90
+90	val_90
+92	val_92
+95	val_95
+95	val_95
+96	val_96
+97	val_97
+97	val_97
+98	val_98
+98	val_98
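
The Stats Work stage and the compute_stats reducers in the input14 plan above come from Hive's automatic column statistics gathering. As a hedged sketch (assuming the dest1_n42 table created by this test and default settings), the gathered stats can be inspected per column, and disabling autogather would drop the compute_stats branch from plans like this one:

    -- inspect the per-column statistics written by the Stats Work stage
    DESCRIBE FORMATTED dest1_n42 key;
    -- with autogather off, the compute_stats reducer disappears from the plan
    SET hive.stats.column.autogather=false;
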
diff --git a/ql/src/test/results/clientpositive/llap/input14_limit.q.out b/ql/src/test/results/clientpositive/llap/input14_limit.q.out
new file mode 100644
index 0000000..25cfd31
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/input14_limit.q.out
@@ -0,0 +1,177 @@
+PREHOOK: query: CREATE TABLE dest1_n13(key INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest1_n13
+POSTHOOK: query: CREATE TABLE dest1_n13(key INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest1_n13
+PREHOOK: query: EXPLAIN
+FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value)
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey LIMIT 20
+) tmap
+INSERT OVERWRITE TABLE dest1_n13 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1_n13
+POSTHOOK: query: EXPLAIN
+FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value)
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey LIMIT 20
+) tmap
+INSERT OVERWRITE TABLE dest1_n13 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1_n13
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                      Top N Key Operator
+                        sort order: +
+                        keys: _col0 (type: string)
+                        null sort order: a
+                        Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                        top n: 20
+                        Reduce Output Operator
+                          key expressions: _col0 (type: string)
+                          null sort order: a
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: string)
+                          Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                          TopN Hash Memory Usage: 0.1
+                          value expressions: _col0 (type: string), _col1 (type: string)
+        Reducer 2 
+            Execution mode: vectorized, llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                Limit
+                  Number of rows: 20
+                  Statistics: Num rows: 20 Data size: 3560 Basic stats: COMPLETE Column stats: COMPLETE
+                  Top N Key Operator
+                    sort order: +
+                    keys: _col0 (type: string)
+                    null sort order: a
+                    Statistics: Num rows: 20 Data size: 3560 Basic stats: COMPLETE Column stats: COMPLETE
+                    top n: 20
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string)
+                      null sort order: a
+                      sort order: +
+                      Map-reduce partition columns: _col0 (type: string)
+                      Statistics: Num rows: 20 Data size: 3560 Basic stats: COMPLETE Column stats: COMPLETE
+                      TopN Hash Memory Usage: 0.1
+                      value expressions: _col1 (type: string)
+        Reducer 3 
+            Execution mode: vectorized, llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 20 Data size: 3560 Basic stats: COMPLETE Column stats: COMPLETE
+                Limit
+                  Number of rows: 20
+                  Statistics: Num rows: 20 Data size: 3560 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (_col0 < 100) (type: boolean)
+                    Statistics: Num rows: 6 Data size: 1068 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 6 Data size: 570 Basic stats: COMPLETE Column stats: COMPLETE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 6 Data size: 570 Basic stats: COMPLETE Column stats: COMPLETE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: default.dest1_n13
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1_n13
+
+  Stage: Stage-3
+    Stats Work
+      Basic Stats Work:
+
+PREHOOK: query: FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value)
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey LIMIT 20
+) tmap
+INSERT OVERWRITE TABLE dest1_n13 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1_n13
+POSTHOOK: query: FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value)
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey LIMIT 20
+) tmap
+INSERT OVERWRITE TABLE dest1_n13 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1_n13
+POSTHOOK: Lineage: dest1_n13.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1_n13.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT dest1_n13.* FROM dest1_n13
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1_n13
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT dest1_n13.* FROM dest1_n13
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1_n13
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+0	val_0
+0	val_0
+0	val_0
+10	val_10
+11	val_11
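
The Top N Key Operator entries in the input14_limit plan show Hive pushing the LIMIT 20 below the shuffle, so each task forwards only its top keys. A hedged sketch for comparing plans with the pushdown on and off (hive.optimize.topnkey is the governing property on master; availability may differ in older releases):

    SET hive.optimize.topnkey=false;
    EXPLAIN SELECT key, value FROM src ORDER BY key LIMIT 20;  -- no Top N Key Operator
    SET hive.optimize.topnkey=true;
    EXPLAIN SELECT key, value FROM src ORDER BY key LIMIT 20;  -- operator reappears
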
diff --git a/ql/src/test/results/clientpositive/llap/input17.q.out b/ql/src/test/results/clientpositive/llap/input17.q.out
new file mode 100644
index 0000000..3074039
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/input17.q.out
@@ -0,0 +1,176 @@
+PREHOOK: query: CREATE TABLE dest1_n81(key INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest1_n81
+POSTHOOK: query: CREATE TABLE dest1_n81(key INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest1_n81
+PREHOOK: query: EXPLAIN
+FROM (
+  FROM src_thrift
+  SELECT TRANSFORM(src_thrift.aint + src_thrift.lint[0], src_thrift.lintstring[0])
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1_n81 SELECT tmap.tkey, tmap.tvalue
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_thrift
+PREHOOK: Output: default@dest1_n81
+POSTHOOK: query: EXPLAIN
+FROM (
+  FROM src_thrift
+  SELECT TRANSFORM(src_thrift.aint + src_thrift.lint[0], src_thrift.lintstring[0])
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1_n81 SELECT tmap.tkey, tmap.tvalue
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_thrift
+POSTHOOK: Output: default@dest1_n81
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src_thrift
+                  Statistics: Num rows: 11 Data size: 29524 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: (aint + lint[0]) (type: int), lintstring[0] (type: struct<myint:int,mystring:string,underscore_int:int>)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 11 Data size: 29524 Basic stats: COMPLETE Column stats: NONE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 11 Data size: 29524 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        null sort order: a
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 11 Data size: 29524 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: string), _col1 (type: string)
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: UDFToInteger(VALUE._col0) (type: int), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 11 Data size: 29524 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 11 Data size: 29524 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.dest1_n81
+                Select Operator
+                  expressions: _col0 (type: int), _col1 (type: string)
+                  outputColumnNames: key, value
+                  Statistics: Num rows: 11 Data size: 29524 Basic stats: COMPLETE Column stats: NONE
+                  Group By Operator
+                    aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                    minReductionHashAggr: 0.99
+                    mode: hash
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 1 Data size: 3548 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      null sort order: 
+                      sort order: 
+                      Statistics: Num rows: 1 Data size: 3548 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 3564 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 3564 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1_n81
+
+  Stage: Stage-3
+    Stats Work
+      Basic Stats Work:
+      Column Stats Desc:
+          Columns: key, value
+          Column Types: int, string
+          Table: default.dest1_n81
+
+PREHOOK: query: FROM (
+  FROM src_thrift
+  SELECT TRANSFORM(src_thrift.aint + src_thrift.lint[0], src_thrift.lintstring[0])
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1_n81 SELECT tmap.tkey, tmap.tvalue
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_thrift
+PREHOOK: Output: default@dest1_n81
+POSTHOOK: query: FROM (
+  FROM src_thrift
+  SELECT TRANSFORM(src_thrift.aint + src_thrift.lint[0], src_thrift.lintstring[0])
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1_n81 SELECT tmap.tkey, tmap.tvalue
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_thrift
+POSTHOOK: Output: default@dest1_n81
+POSTHOOK: Lineage: dest1_n81.key SCRIPT [(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lint, type:array<int>, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lintstring, type:array<struct<myint:int,mystring:string,underscore_int:int>>, comment:from deserializer), ]
+POSTHOOK: Lineage: dest1_n81.value SCRIPT [(src_thrift)src_thrift.FieldSchema(name:aint, type:int, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lint, type:array<int>, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lintstring, type:array<struct<myint:int,mystring:string,underscore_int:int>>, comment:from deserializer), ]
+PREHOOK: query: SELECT dest1_n81.* FROM dest1_n81
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1_n81
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT dest1_n81.* FROM dest1_n81
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1_n81
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-1461153966	{"myint":49,"mystring":"343","underscore_int":7}
+-1952710705	{"myint":25,"mystring":"125","underscore_int":5}
+-734328905	{"myint":16,"mystring":"64","underscore_int":4}
+-751827636	{"myint":4,"mystring":"8","underscore_int":2}
+1244525196	{"myint":36,"mystring":"216","underscore_int":6}
+1638581586	{"myint":64,"mystring":"512","underscore_int":8}
+1712634731	{"myint":0,"mystring":"0","underscore_int":0}
+336964422	{"myint":81,"mystring":"729","underscore_int":9}
+465985201	{"myint":1,"mystring":"1","underscore_int":1}
+477111225	{"myint":9,"mystring":"27","underscore_int":3}
+NULL	NULL
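
input17 exercises Hive's complex-type accessors: lint[0] indexes an array, and lintstring[0] yields a struct whose fields print as the JSON-like rows above. A small sketch of the same syntax against the src_thrift fixture (field names taken from the lineage output above):

    SELECT aint + lint[0]          AS k,  -- array element inside an arithmetic expression
           lintstring[0].mystring  AS s   -- field of a struct stored inside an array
    FROM src_thrift;
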
diff --git a/ql/src/test/results/clientpositive/llap/input18.q.out b/ql/src/test/results/clientpositive/llap/input18.q.out
new file mode 100644
index 0000000..4623d8b
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/input18.q.out
@@ -0,0 +1,252 @@
+PREHOOK: query: CREATE TABLE dest1_n124(key INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest1_n124
+POSTHOOK: query: CREATE TABLE dest1_n124(key INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest1_n124
+PREHOOK: query: EXPLAIN
+FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+         USING 'cat'
+  CLUSTER BY key
+) tmap
+INSERT OVERWRITE TABLE dest1_n124 SELECT tmap.key, regexp_replace(tmap.value,'\t','+') WHERE tmap.key < 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1_n124
+POSTHOOK: query: EXPLAIN
+FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+         USING 'cat'
+  CLUSTER BY key
+) tmap
+INSERT OVERWRITE TABLE dest1_n124 SELECT tmap.key, regexp_replace(tmap.value,'\t','+') WHERE tmap.key < 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1_n124
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: string), value (type: string), 3 (type: int), 7 (type: int)
+                    outputColumnNames: _col0, _col1, _col2, _col3
+                    Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
+                      Filter Operator
+                        predicate: (_col0 < 100) (type: boolean)
+                        Statistics: Num rows: 166 Data size: 30876 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: string)
+                          null sort order: a
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: string)
+                          Statistics: Num rows: 166 Data size: 30876 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: string), _col1 (type: string)
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: UDFToInteger(VALUE._col0) (type: int), regexp_replace(VALUE._col1, '	', '+') (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 166 Data size: 31208 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 166 Data size: 31208 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.dest1_n124
+                Select Operator
+                  expressions: _col0 (type: int), _col1 (type: string)
+                  outputColumnNames: key, value
+                  Statistics: Num rows: 166 Data size: 31208 Basic stats: COMPLETE Column stats: COMPLETE
+                  Group By Operator
+                    aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                    minReductionHashAggr: 0.99
+                    mode: hash
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      null sort order: 
+                      sort order: 
+                      Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1_n124
+
+  Stage: Stage-3
+    Stats Work
+      Basic Stats Work:
+      Column Stats Desc:
+          Columns: key, value
+          Column Types: int, string
+          Table: default.dest1_n124
+
+PREHOOK: query: FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+         USING 'cat'
+  CLUSTER BY key
+) tmap
+INSERT OVERWRITE TABLE dest1_n124 SELECT tmap.key, regexp_replace(tmap.value,'\t','+') WHERE tmap.key < 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1_n124
+POSTHOOK: query: FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+         USING 'cat'
+  CLUSTER BY key
+) tmap
+INSERT OVERWRITE TABLE dest1_n124 SELECT tmap.key, regexp_replace(tmap.value,'\t','+') WHERE tmap.key < 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1_n124
+POSTHOOK: Lineage: dest1_n124.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1_n124.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT dest1_n124.* FROM dest1_n124
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1_n124
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT dest1_n124.* FROM dest1_n124
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1_n124
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+0	val_0+3+7
+0	val_0+3+7
+0	val_0+3+7
+10	val_10+3+7
+11	val_11+3+7
+12	val_12+3+7
+12	val_12+3+7
+15	val_15+3+7
+15	val_15+3+7
+17	val_17+3+7
+18	val_18+3+7
+18	val_18+3+7
+19	val_19+3+7
+2	val_2+3+7
+20	val_20+3+7
+24	val_24+3+7
+24	val_24+3+7
+26	val_26+3+7
+26	val_26+3+7
+27	val_27+3+7
+28	val_28+3+7
+30	val_30+3+7
+33	val_33+3+7
+34	val_34+3+7
+35	val_35+3+7
+35	val_35+3+7
+35	val_35+3+7
+37	val_37+3+7
+37	val_37+3+7
+4	val_4+3+7
+41	val_41+3+7
+42	val_42+3+7
+42	val_42+3+7
+43	val_43+3+7
+44	val_44+3+7
+47	val_47+3+7
+5	val_5+3+7
+5	val_5+3+7
+5	val_5+3+7
+51	val_51+3+7
+51	val_51+3+7
+53	val_53+3+7
+54	val_54+3+7
+57	val_57+3+7
+58	val_58+3+7
+58	val_58+3+7
+64	val_64+3+7
+65	val_65+3+7
+66	val_66+3+7
+67	val_67+3+7
+67	val_67+3+7
+69	val_69+3+7
+70	val_70+3+7
+70	val_70+3+7
+70	val_70+3+7
+72	val_72+3+7
+72	val_72+3+7
+74	val_74+3+7
+76	val_76+3+7
+76	val_76+3+7
+77	val_77+3+7
+78	val_78+3+7
+8	val_8+3+7
+80	val_80+3+7
+82	val_82+3+7
+83	val_83+3+7
+83	val_83+3+7
+84	val_84+3+7
+84	val_84+3+7
+85	val_85+3+7
+86	val_86+3+7
+87	val_87+3+7
+9	val_9+3+7
+90	val_90+3+7
+90	val_90+3+7
+90	val_90+3+7
+92	val_92+3+7
+95	val_95+3+7
+95	val_95+3+7
+96	val_96+3+7
+97	val_97+3+7
+97	val_97+3+7
+98	val_98+3+7
+98	val_98+3+7
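
The val_0+3+7 rows arise because everything the 'cat' script emits after the first tab is folded into tmap.value, and regexp_replace then rewrites those embedded tabs as '+'. A one-line sketch of that last step (Hive string literals accept \t for a tab character):

    SELECT regexp_replace('val_0\t3\t7', '\t', '+');  -- val_0+3+7
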
diff --git a/ql/src/test/results/clientpositive/input20.q.out b/ql/src/test/results/clientpositive/llap/input20.q.out
similarity index 54%
rename from ql/src/test/results/clientpositive/input20.q.out
rename to ql/src/test/results/clientpositive/llap/input20.q.out
index d90b908..819d888 100644
--- a/ql/src/test/results/clientpositive/input20.q.out
+++ b/ql/src/test/results/clientpositive/llap/input20.q.out
@@ -36,74 +36,99 @@ POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1_n138
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0, Stage-3
-  Stage-3 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
 
 STAGE PLANS:
   Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: string), key (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 87000 Basic stats: COMPLETE Column stats: COMPLETE
-              Transform Operator
-                command: cat
-                output info:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: string), key (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 87000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 500 Data size: 87000 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        null sort order: 
+                        sort order: 
+                        Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
+                        Statistics: Num rows: 500 Data size: 87000 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col0 (type: string), _col1 (type: string)
+        Reducer 2 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
                 Statistics: Num rows: 500 Data size: 87000 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  null sort order: 
-                  sort order: 
-                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
+                Transform Operator
+                  command: python input20_script.py
+                  output info:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   Statistics: Num rows: 500 Data size: 87000 Basic stats: COMPLETE Column stats: COMPLETE
-                  value expressions: _col0 (type: string), _col1 (type: string)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 500 Data size: 87000 Basic stats: COMPLETE Column stats: COMPLETE
-          Transform Operator
-            command: python input20_script.py
-            output info:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-            Statistics: Num rows: 500 Data size: 87000 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.dest1_n138
-              Select Operator
-                expressions: _col0 (type: int), _col1 (type: string)
-                outputColumnNames: key, value
-                Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                  minReductionHashAggr: 0.99
-                  mode: hash
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                  File Output Operator
-                    compressed: false
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+                  Select Operator
+                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.dest1_n138
+                    Select Operator
+                      expressions: _col0 (type: int), _col1 (type: string)
+                      outputColumnNames: key, value
+                      Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                        minReductionHashAggr: 0.99
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Dependency Collection
 
   Stage: Stage-0
     Move Operator
@@ -115,7 +140,7 @@ STAGE PLANS:
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1_n138
 
-  Stage: Stage-2
+  Stage: Stage-3
     Stats Work
       Basic Stats Work:
       Column Stats Desc:
@@ -123,30 +148,6 @@ STAGE PLANS:
           Column Types: int, string
           Table: default.dest1_n138
 
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              null sort order: 
-              sort order: 
-              Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-              value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
 PREHOOK: query: FROM (
   FROM src
   MAP src.key, src.key
@@ -178,11 +179,11 @@ POSTHOOK: Lineage: dest1_n138.value SCRIPT [(src)src.FieldSchema(name:key, type:
 PREHOOK: query: SELECT * FROM dest1_n138 ORDER BY key, value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1_n138
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM dest1_n138 ORDER BY key, value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1_n138
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1	105_105
 1	10_10
 1	111_111
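
input20's plan chains two Transform Operators: 'cat' map-side, then python input20_script.py reduce-side, which is what the MAP/REDUCE streaming syntax desugars to (both are synonyms for SELECT TRANSFORM). A generic, hedged sketch of the shape — my_reducer.py is a hypothetical placeholder, not the test's script:

    FROM (
      FROM src
      MAP src.key, src.key USING 'cat'
      DISTRIBUTE BY key SORT BY key
    ) staged
    REDUCE staged.key, staged.value USING 'python my_reducer.py' AS (k, v);
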
diff --git a/ql/src/test/results/clientpositive/input33.q.out b/ql/src/test/results/clientpositive/llap/input33.q.out
similarity index 54%
rename from ql/src/test/results/clientpositive/input33.q.out
rename to ql/src/test/results/clientpositive/llap/input33.q.out
index c8df2ef..daf57de 100644
--- a/ql/src/test/results/clientpositive/input33.q.out
+++ b/ql/src/test/results/clientpositive/llap/input33.q.out
@@ -36,74 +36,99 @@ POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1_n135
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0, Stage-3
-  Stage-3 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
 
 STAGE PLANS:
   Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: string), key (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 87000 Basic stats: COMPLETE Column stats: COMPLETE
-              Transform Operator
-                command: cat
-                output info:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: string), key (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 87000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 500 Data size: 87000 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        null sort order: 
+                        sort order: 
+                        Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
+                        Statistics: Num rows: 500 Data size: 87000 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col0 (type: string), _col1 (type: string)
+        Reducer 2 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
                 Statistics: Num rows: 500 Data size: 87000 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  null sort order: 
-                  sort order: 
-                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
+                Transform Operator
+                  command: python input20_script.py
+                  output info:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   Statistics: Num rows: 500 Data size: 87000 Basic stats: COMPLETE Column stats: COMPLETE
-                  value expressions: _col0 (type: string), _col1 (type: string)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 500 Data size: 87000 Basic stats: COMPLETE Column stats: COMPLETE
-          Transform Operator
-            command: python input20_script.py
-            output info:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-            Statistics: Num rows: 500 Data size: 87000 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.dest1_n135
-              Select Operator
-                expressions: _col0 (type: int), _col1 (type: string)
-                outputColumnNames: key, value
-                Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                  minReductionHashAggr: 0.99
-                  mode: hash
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                  File Output Operator
-                    compressed: false
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+                  Select Operator
+                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.dest1_n135
+                    Select Operator
+                      expressions: _col0 (type: int), _col1 (type: string)
+                      outputColumnNames: key, value
+                      Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                        minReductionHashAggr: 0.99
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Dependency Collection
 
   Stage: Stage-0
     Move Operator
@@ -115,7 +140,7 @@ STAGE PLANS:
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1_n135
 
-  Stage: Stage-2
+  Stage: Stage-3
     Stats Work
       Basic Stats Work:
       Column Stats Desc:
@@ -123,30 +148,6 @@ STAGE PLANS:
           Column Types: int, string
           Table: default.dest1_n135
 
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              null sort order: 
-              sort order: 
-              Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-              value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
 PREHOOK: query: FROM (
   FROM src
   MAP src.key, src.key
@@ -178,11 +179,11 @@ POSTHOOK: Lineage: dest1_n135.value SCRIPT [(src)src.FieldSchema(name:key, type:
 PREHOOK: query: SELECT * FROM dest1_n135 ORDER BY key, value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1_n135
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM dest1_n135 ORDER BY key, value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1_n135
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1	105_105
 1	10_10
 1	111_111
diff --git a/ql/src/test/results/clientpositive/input34.q.out b/ql/src/test/results/clientpositive/llap/input34.q.out
similarity index 66%
rename from ql/src/test/results/clientpositive/input34.q.out
rename to ql/src/test/results/clientpositive/llap/input34.q.out
index 00dd35d..3f702cd 100644
--- a/ql/src/test/results/clientpositive/input34.q.out
+++ b/ql/src/test/results/clientpositive/llap/input34.q.out
@@ -30,81 +30,79 @@ POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1_n161
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
-  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
-  Stage-4
-  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
-  Stage-2 depends on stages: Stage-0
-  Stage-3
-  Stage-5
-  Stage-6 depends on stages: Stage-5
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
 
 STAGE PLANS:
   Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-              Transform Operator
-                command: cat
-                output info:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        name: default.dest1_n161
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
-                    expressions: _col0 (type: int), _col1 (type: string)
-                    outputColumnNames: key, value
-                    Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
-                    Group By Operator
-                      aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                      minReductionHashAggr: 0.99
-                      mode: hash
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        null sort order: 
-                        sort order: 
-                        Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                      Select Operator
+                        expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                              name: default.dest1_n161
+                        Select Operator
+                          expressions: _col0 (type: int), _col1 (type: string)
+                          outputColumnNames: key, value
+                          Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
+                          Group By Operator
+                            aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                            minReductionHashAggr: 0.99
+                            mode: hash
+                            outputColumnNames: _col0, _col1
+                            Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                            Reduce Output Operator
+                              null sort order: 
+                              sort order: 
+                              Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                              value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
-  Stage: Stage-7
-    Conditional Operator
-
-  Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
+  Stage: Stage-2
+    Dependency Collection
 
   Stage: Stage-0
     Move Operator
@@ -116,7 +114,7 @@ STAGE PLANS:
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1_n161
 
-  Stage: Stage-2
+  Stage: Stage-3
     Stats Work
       Basic Stats Work:
       Column Stats Desc:
@@ -124,36 +122,6 @@ STAGE PLANS:
           Column Types: int, string
           Table: default.dest1_n161
 
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.dest1_n161
-
-  Stage: Stage-5
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.dest1_n161
-
-  Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
 PREHOOK: query: FROM (
   FROM src
   SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
@@ -179,11 +147,11 @@ POSTHOOK: Lineage: dest1_n161.value SCRIPT [(src)src.FieldSchema(name:key, type:
 PREHOOK: query: SELECT dest1_n161.* FROM dest1_n161
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1_n161
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT dest1_n161.* FROM dest1_n161
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1_n161
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 238	val_238
 86	val_86
 311	val_311
diff --git a/ql/src/test/results/clientpositive/input35.q.out b/ql/src/test/results/clientpositive/llap/input35.q.out
similarity index 65%
rename from ql/src/test/results/clientpositive/input35.q.out
rename to ql/src/test/results/clientpositive/llap/input35.q.out
index cee491f..85e5412 100644
--- a/ql/src/test/results/clientpositive/input35.q.out
+++ b/ql/src/test/results/clientpositive/llap/input35.q.out
@@ -30,81 +30,79 @@ POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1_n25
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
-  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
-  Stage-4
-  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
-  Stage-2 depends on stages: Stage-0
-  Stage-3
-  Stage-5
-  Stage-6 depends on stages: Stage-5
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
 
 STAGE PLANS:
   Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-              Transform Operator
-                command: cat
-                output info:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        name: default.dest1_n25
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
-                    expressions: _col0 (type: int), _col1 (type: string)
-                    outputColumnNames: key, value
-                    Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
-                    Group By Operator
-                      aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                      minReductionHashAggr: 0.99
-                      mode: hash
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        null sort order: 
-                        sort order: 
-                        Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                      Select Operator
+                        expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                              name: default.dest1_n25
+                        Select Operator
+                          expressions: _col0 (type: int), _col1 (type: string)
+                          outputColumnNames: key, value
+                          Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
+                          Group By Operator
+                            aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                            minReductionHashAggr: 0.99
+                            mode: hash
+                            outputColumnNames: _col0, _col1
+                            Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                            Reduce Output Operator
+                              null sort order: 
+                              sort order: 
+                              Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                              value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
-  Stage: Stage-7
-    Conditional Operator
-
-  Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
+  Stage: Stage-2
+    Dependency Collection
 
   Stage: Stage-0
     Move Operator
@@ -116,7 +114,7 @@ STAGE PLANS:
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1_n25
 
-  Stage: Stage-2
+  Stage: Stage-3
     Stats Work
       Basic Stats Work:
       Column Stats Desc:
@@ -124,36 +122,6 @@ STAGE PLANS:
           Column Types: int, string
           Table: default.dest1_n25
 
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.dest1_n25
-
-  Stage: Stage-5
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.dest1_n25
-
-  Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
 PREHOOK: query: FROM (
   FROM src
   SELECT TRANSFORM(src.key, src.value) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002'
@@ -179,11 +147,11 @@ POSTHOOK: Lineage: dest1_n25.value SCRIPT [(src)src.FieldSchema(name:key, type:s
 PREHOOK: query: SELECT dest1_n25.* FROM dest1_n25
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1_n25
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT dest1_n25.* FROM dest1_n25
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1_n25
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 238	val_238
 86	val_86
 311	val_311
diff --git a/ql/src/test/results/clientpositive/input36.q.out b/ql/src/test/results/clientpositive/llap/input36.q.out
similarity index 63%
rename from ql/src/test/results/clientpositive/input36.q.out
rename to ql/src/test/results/clientpositive/llap/input36.q.out
index 45289b2..8a8e30c 100644
--- a/ql/src/test/results/clientpositive/input36.q.out
+++ b/ql/src/test/results/clientpositive/llap/input36.q.out
@@ -30,81 +30,79 @@ POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1_n70
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
-  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
-  Stage-4
-  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
-  Stage-2 depends on stages: Stage-0
-  Stage-3
-  Stage-5
-  Stage-6 depends on stages: Stage-5
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
 
 STAGE PLANS:
   Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-              Transform Operator
-                command: cat
-                output info:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        name: default.dest1_n70
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
-                    expressions: _col0 (type: int), _col1 (type: string)
-                    outputColumnNames: key, value
-                    Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
-                    Group By Operator
-                      aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                      minReductionHashAggr: 0.99
-                      mode: hash
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        null sort order: 
-                        sort order: 
-                        Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                      Select Operator
+                        expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                              name: default.dest1_n70
+                        Select Operator
+                          expressions: _col0 (type: int), _col1 (type: string)
+                          outputColumnNames: key, value
+                          Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
+                          Group By Operator
+                            aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                            minReductionHashAggr: 0.99
+                            mode: hash
+                            outputColumnNames: _col0, _col1
+                            Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                            Reduce Output Operator
+                              null sort order: 
+                              sort order: 
+                              Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                              value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
-  Stage: Stage-7
-    Conditional Operator
-
-  Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
+  Stage: Stage-2
+    Dependency Collection
 
   Stage: Stage-0
     Move Operator
@@ -116,7 +114,7 @@ STAGE PLANS:
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1_n70
 
-  Stage: Stage-2
+  Stage: Stage-3
     Stats Work
       Basic Stats Work:
       Column Stats Desc:
@@ -124,36 +122,6 @@ STAGE PLANS:
           Column Types: int, string
           Table: default.dest1_n70
 
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.dest1_n70
-
-  Stage: Stage-5
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.dest1_n70
-
-  Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
 PREHOOK: query: FROM (
   FROM src
   SELECT TRANSFORM(src.key, src.value) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002'
@@ -179,11 +147,11 @@ POSTHOOK: Lineage: dest1_n70.value SCRIPT [(src)src.FieldSchema(name:key, type:s
 PREHOOK: query: SELECT dest1_n70.* FROM dest1_n70
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1_n70
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT dest1_n70.* FROM dest1_n70
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1_n70
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 NULL	NULL
 NULL	NULL
 NULL	NULL
diff --git a/ql/src/test/results/clientpositive/input38.q.out b/ql/src/test/results/clientpositive/llap/input38.q.out
similarity index 72%
rename from ql/src/test/results/clientpositive/input38.q.out
rename to ql/src/test/results/clientpositive/llap/input38.q.out
index d46ddf0..161cda2 100644
--- a/ql/src/test/results/clientpositive/input38.q.out
+++ b/ql/src/test/results/clientpositive/llap/input38.q.out
@@ -28,77 +28,75 @@ POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1_n91
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
-  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
-  Stage-4
-  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
-  Stage-2 depends on stages: Stage-0
-  Stage-3
-  Stage-5
-  Stage-6 depends on stages: Stage-5
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
 
 STAGE PLANS:
   Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: string), value (type: string), 3 (type: int), 7 (type: int)
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
-              Transform Operator
-                command: cat
-                output info:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: string), value (type: string), 3 (type: int), 7 (type: int)
+                    outputColumnNames: _col0, _col1, _col2, _col3
+                    Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: default.dest1_n91
+                      Select Operator
+                        expressions: _col0 (type: string), _col1 (type: string)
+                        outputColumnNames: key, value
+                        Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                        Group By Operator
+                          aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                          minReductionHashAggr: 0.99
+                          mode: hash
+                          outputColumnNames: _col0, _col1
+                          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                          Reduce Output Operator
+                            null sort order: 
+                            sort order: 
+                            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                            value expressions: _col0 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.dest1_n91
-                Select Operator
-                  expressions: _col0 (type: string), _col1 (type: string)
-                  outputColumnNames: key, value
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                  Group By Operator
-                    aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
-                    minReductionHashAggr: 0.99
-                    mode: hash
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      null sort order: 
-                      sort order: 
-                      Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-                      value expressions: _col0 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-7
-    Conditional Operator
 
-  Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
+  Stage: Stage-2
+    Dependency Collection
 
   Stage: Stage-0
     Move Operator
@@ -110,7 +108,7 @@ STAGE PLANS:
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1_n91
 
-  Stage: Stage-2
+  Stage: Stage-3
     Stats Work
       Basic Stats Work:
       Column Stats Desc:
@@ -118,36 +116,6 @@ STAGE PLANS:
           Column Types: string, string
           Table: default.dest1_n91
 
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.dest1_n91
-
-  Stage: Stage-5
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.dest1_n91
-
-  Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
 PREHOOK: query: FROM (
   FROM src
   SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
@@ -171,11 +139,11 @@ POSTHOOK: Lineage: dest1_n91.value SCRIPT [(src)src.FieldSchema(name:key, type:s
 PREHOOK: query: SELECT dest1_n91.* FROM dest1_n91
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1_n91
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT dest1_n91.* FROM dest1_n91
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1_n91
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 238	val_238	3	7
 86	val_86	3	7
 311	val_311	3	7
diff --git a/ql/src/test/results/clientpositive/llap/input5.q.out b/ql/src/test/results/clientpositive/llap/input5.q.out
new file mode 100644
index 0000000..ab091fe
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/input5.q.out
@@ -0,0 +1,176 @@
+PREHOOK: query: CREATE TABLE dest1_n94(key STRING, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dest1_n94
+POSTHOOK: query: CREATE TABLE dest1_n94(key STRING, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest1_n94
+PREHOOK: query: EXPLAIN
+FROM (
+  FROM src_thrift
+  SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring)
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1_n94 SELECT tmap.tkey, tmap.tvalue
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_thrift
+PREHOOK: Output: default@dest1_n94
+POSTHOOK: query: EXPLAIN
+FROM (
+  FROM src_thrift
+  SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring)
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1_n94 SELECT tmap.tkey, tmap.tvalue
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_thrift
+POSTHOOK: Output: default@dest1_n94
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src_thrift
+                  Statistics: Num rows: 11 Data size: 29480 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: lint (type: array<int>), lintstring (type: array<struct<myint:int,mystring:string,underscore_int:int>>)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 11 Data size: 29480 Basic stats: COMPLETE Column stats: NONE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 11 Data size: 29480 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        null sort order: a
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 11 Data size: 29480 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: string), _col1 (type: string)
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 11 Data size: 29480 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 11 Data size: 29480 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.dest1_n94
+                Select Operator
+                  expressions: _col0 (type: string), _col1 (type: string)
+                  outputColumnNames: key, value
+                  Statistics: Num rows: 11 Data size: 29480 Basic stats: COMPLETE Column stats: NONE
+                  Group By Operator
+                    aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                    minReductionHashAggr: 0.99
+                    mode: hash
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 1 Data size: 3560 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      null sort order: 
+                      sort order: 
+                      Statistics: Num rows: 1 Data size: 3560 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 3560 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 3560 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1_n94
+
+  Stage: Stage-3
+    Stats Work
+      Basic Stats Work:
+      Column Stats Desc:
+          Columns: key, value
+          Column Types: string, string
+          Table: default.dest1_n94
+
+PREHOOK: query: FROM (
+  FROM src_thrift
+  SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring)
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1_n94 SELECT tmap.tkey, tmap.tvalue
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_thrift
+PREHOOK: Output: default@dest1_n94
+POSTHOOK: query: FROM (
+  FROM src_thrift
+  SELECT TRANSFORM(src_thrift.lint, src_thrift.lintstring)
+         USING 'cat' AS (tkey, tvalue) 
+  CLUSTER BY tkey 
+) tmap
+INSERT OVERWRITE TABLE dest1_n94 SELECT tmap.tkey, tmap.tvalue
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_thrift
+POSTHOOK: Output: default@dest1_n94
+POSTHOOK: Lineage: dest1_n94.key SCRIPT [(src_thrift)src_thrift.FieldSchema(name:lint, type:array<int>, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lintstring, type:array<struct<myint:int,mystring:string,underscore_int:int>>, comment:from deserializer), ]
+POSTHOOK: Lineage: dest1_n94.value SCRIPT [(src_thrift)src_thrift.FieldSchema(name:lint, type:array<int>, comment:from deserializer), (src_thrift)src_thrift.FieldSchema(name:lintstring, type:array<struct<myint:int,mystring:string,underscore_int:int>>, comment:from deserializer), ]
+PREHOOK: query: SELECT dest1_n94.* FROM dest1_n94
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1_n94
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT dest1_n94.* FROM dest1_n94
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1_n94
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+NULL	NULL
+[0,0,0]	[{"myint":0,"mystring":"0","underscore_int":0}]
+[1,2,3]	[{"myint":1,"mystring":"1","underscore_int":1}]
+[2,4,6]	[{"myint":4,"mystring":"8","underscore_int":2}]
+[3,6,9]	[{"myint":9,"mystring":"27","underscore_int":3}]
+[4,8,12]	[{"myint":16,"mystring":"64","underscore_int":4}]
+[5,10,15]	[{"myint":25,"mystring":"125","underscore_int":5}]
+[6,12,18]	[{"myint":36,"mystring":"216","underscore_int":6}]
+[7,14,21]	[{"myint":49,"mystring":"343","underscore_int":7}]
+[8,16,24]	[{"myint":64,"mystring":"512","underscore_int":8}]
+[9,18,27]	[{"myint":81,"mystring":"729","underscore_int":9}]
diff --git a/ql/src/test/results/clientpositive/llap/insert_into3.q.out b/ql/src/test/results/clientpositive/llap/insert_into3.q.out
new file mode 100644
index 0000000..9c9820e
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/insert_into3.q.out
@@ -0,0 +1,546 @@
+PREHOOK: query: DROP TABLE insert_into3a
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE insert_into3a
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE insert_into3b
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE insert_into3b
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE insert_into3a (key int, value string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@insert_into3a
+POSTHOOK: query: CREATE TABLE insert_into3a (key int, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@insert_into3a
+PREHOOK: query: CREATE TABLE insert_into3b (key int, value string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@insert_into3b
+POSTHOOK: query: CREATE TABLE insert_into3b (key int, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@insert_into3b
+PREHOOK: query: EXPLAIN FROM src INSERT INTO TABLE insert_into3a SELECT * ORDER BY key, value LIMIT 50
+                 INSERT INTO TABLE insert_into3b SELECT * ORDER BY key, value LIMIT 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_into3a
+PREHOOK: Output: default@insert_into3b
+POSTHOOK: query: EXPLAIN FROM src INSERT INTO TABLE insert_into3a SELECT * ORDER BY key, value LIMIT 50
+                 INSERT INTO TABLE insert_into3b SELECT * ORDER BY key, value LIMIT 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_into3a
+POSTHOOK: Output: default@insert_into3b
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-3 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-3
+  Stage-4 depends on stages: Stage-0
+  Stage-1 depends on stages: Stage-3
+  Stage-5 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (SIMPLE_EDGE)
+        Reducer 5 <- Reducer 4 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Top N Key Operator
+                    sort order: ++
+                    keys: key (type: string), value (type: string)
+                    null sort order: zz
+                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                    top n: 50
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string), _col1 (type: string)
+                        null sort order: zz
+                        sort order: ++
+                        Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                        TopN Hash Memory Usage: 0.1
+                  Top N Key Operator
+                    sort order: ++
+                    keys: key (type: string), value (type: string)
+                    null sort order: zz
+                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                    top n: 100
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string), _col1 (type: string)
+                        null sort order: zz
+                        sort order: ++
+                        Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                        TopN Hash Memory Usage: 0.1
+            Execution mode: vectorized, llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                Limit
+                  Number of rows: 50
+                  Statistics: Num rows: 50 Data size: 8900 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 50 Data size: 4750 Basic stats: COMPLETE Column stats: COMPLETE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 50 Data size: 4750 Basic stats: COMPLETE Column stats: COMPLETE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.insert_into3a
+                    Select Operator
+                      expressions: _col0 (type: int), _col1 (type: string)
+                      outputColumnNames: key, value
+                      Statistics: Num rows: 50 Data size: 4750 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                        minReductionHashAggr: 0.98
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+        Reducer 4 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                Limit
+                  Number of rows: 100
+                  Statistics: Num rows: 100 Data size: 17800 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 100 Data size: 9500 Basic stats: COMPLETE Column stats: COMPLETE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 100 Data size: 9500 Basic stats: COMPLETE Column stats: COMPLETE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.insert_into3b
+                    Select Operator
+                      expressions: _col0 (type: int), _col1 (type: string)
+                      outputColumnNames: key, value
+                      Statistics: Num rows: 100 Data size: 9500 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                        minReductionHashAggr: 0.99
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+        Reducer 5 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-3
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into3a
+
+  Stage: Stage-4
+    Stats Work
+      Basic Stats Work:
+      Column Stats Desc:
+          Columns: key, value
+          Column Types: int, string
+          Table: default.insert_into3a
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into3b
+
+  Stage: Stage-5
+    Stats Work
+      Basic Stats Work:
+      Column Stats Desc:
+          Columns: key, value
+          Column Types: int, string
+          Table: default.insert_into3b
+
+PREHOOK: query: FROM src INSERT INTO TABLE insert_into3a SELECT * ORDER BY key, value LIMIT 50
+         INSERT INTO TABLE insert_into3b SELECT * ORDER BY key, value LIMIT 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_into3a
+PREHOOK: Output: default@insert_into3b
+POSTHOOK: query: FROM src INSERT INTO TABLE insert_into3a SELECT * ORDER BY key, value LIMIT 50
+         INSERT INTO TABLE insert_into3b SELECT * ORDER BY key, value LIMIT 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_into3a
+POSTHOOK: Output: default@insert_into3b
+POSTHOOK: Lineage: insert_into3a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into3a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into3b.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into3b.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3a
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into3a
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3a
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into3a
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-1254133670
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3b
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into3b
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3b
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into3b
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-1142373758
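
[A minimal sketch of the verification idiom used throughout these golden files, with the query taken verbatim from the output above. TRANSFORM(*) streams each row through the shell command 'tr \t _', which rewrites the tab between the columns as an underscore so every row becomes a single string; hashing each string and summing gives an order-independent checksum of the table contents.]

-- Order-independent checksum of a table's contents:
--   1. TRANSFORM(*) pipes each row through 'tr \t _', turning
--      "key<TAB>value" into the single string "key_value";
--   2. HASH(c) hashes that string per row;
--   3. SUM(...) aggregates the hashes, so row order does not matter.
SELECT SUM(HASH(c)) FROM (
    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3a
) t;
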
+PREHOOK: query: EXPLAIN FROM src INSERT OVERWRITE TABLE insert_into3a SELECT * LIMIT 10
+                 INSERT INTO TABLE insert_into3b SELECT * LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_into3a
+PREHOOK: Output: default@insert_into3b
+POSTHOOK: query: EXPLAIN FROM src INSERT OVERWRITE TABLE insert_into3a SELECT * LIMIT 10
+                 INSERT INTO TABLE insert_into3b SELECT * LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_into3a
+POSTHOOK: Output: default@insert_into3b
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-3 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-3
+  Stage-4 depends on stages: Stage-0
+  Stage-1 depends on stages: Stage-3
+  Stage-5 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
+        Reducer 4 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+        Reducer 5 <- Reducer 4 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Limit
+                      Number of rows: 10
+                      Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        null sort order: 
+                        sort order: 
+                        Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+                        TopN Hash Memory Usage: 0.1
+                        value expressions: _col0 (type: string), _col1 (type: string)
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Limit
+                      Number of rows: 10
+                      Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        null sort order: 
+                        sort order: 
+                        Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+                        TopN Hash Memory Usage: 0.1
+                        value expressions: _col0 (type: string), _col1 (type: string)
+            Execution mode: vectorized, llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+                Limit
+                  Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.insert_into3a
+                    Select Operator
+                      expressions: _col0 (type: int), _col1 (type: string)
+                      outputColumnNames: key, value
+                      Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                        minReductionHashAggr: 0.9
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+        Reducer 4 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+                Limit
+                  Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.insert_into3b
+                    Select Operator
+                      expressions: _col0 (type: int), _col1 (type: string)
+                      outputColumnNames: key, value
+                      Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                        minReductionHashAggr: 0.9
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+        Reducer 5 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-3
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into3a
+
+  Stage: Stage-4
+    Stats Work
+      Basic Stats Work:
+      Column Stats Desc:
+          Columns: key, value
+          Column Types: int, string
+          Table: default.insert_into3a
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into3b
+
+  Stage: Stage-5
+    Stats Work
+      Basic Stats Work:
+      Column Stats Desc:
+          Columns: key, value
+          Column Types: int, string
+          Table: default.insert_into3b
+
+PREHOOK: query: FROM src INSERT OVERWRITE TABLE insert_into3a SELECT * LIMIT 10
+         INSERT INTO TABLE insert_into3b SELECT * LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_into3a
+PREHOOK: Output: default@insert_into3b
+POSTHOOK: query: FROM src INSERT OVERWRITE TABLE insert_into3a SELECT * LIMIT 10
+         INSERT INTO TABLE insert_into3b SELECT * LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_into3a
+POSTHOOK: Output: default@insert_into3b
+POSTHOOK: Lineage: insert_into3a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into3a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into3b.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into3b.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3a
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into3a
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3a
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into3a
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-826625916
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3b
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into3b
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3b
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into3b
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-1968999674
+PREHOOK: query: DROP TABLE insert_into3a
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@insert_into3a
+PREHOOK: Output: default@insert_into3a
+POSTHOOK: query: DROP TABLE insert_into3a
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@insert_into3a
+POSTHOOK: Output: default@insert_into3a
+PREHOOK: query: DROP TABLE insert_into3b
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@insert_into3b
+PREHOOK: Output: default@insert_into3b
+POSTHOOK: query: DROP TABLE insert_into3b
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@insert_into3b
+POSTHOOK: Output: default@insert_into3b
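
[The pattern insert_into3.q exercises, distilled from the output above. The multi-insert statements are verbatim from the hooks; the CREATE TABLE statements are an assumption reconstructed from the plans' column-stats metadata (Columns: key, value; Column Types: int, string), since the actual setup precedes this hunk.]

-- Assumed DDL, inferred from the plan metadata above:
CREATE TABLE insert_into3a (key int, value string);
CREATE TABLE insert_into3b (key int, value string);

-- Two targets filled from a single scan of src; each branch carries its
-- own ORDER BY ... LIMIT. The plans above compile this into one Tez DAG:
-- Map 1 fans out to one reducer chain per insert branch, followed by a
-- stats reducer, a Dependency Collection stage, and a Move + Stats Work
-- pair per target table.
FROM src INSERT INTO TABLE insert_into3a SELECT * ORDER BY key, value LIMIT 50
         INSERT INTO TABLE insert_into3b SELECT * ORDER BY key, value LIMIT 100;

-- Mixing OVERWRITE and INTO changes only the Move Operator's "replace"
-- flag in the plan: true for the OVERWRITE branch (Stage-0 above),
-- false for the INTO branch (Stage-1 above).
FROM src INSERT OVERWRITE TABLE insert_into3a SELECT * LIMIT 10
         INSERT INTO TABLE insert_into3b SELECT * LIMIT 10;
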
diff --git a/ql/src/test/results/clientpositive/llap/insert_into4.q.out b/ql/src/test/results/clientpositive/llap/insert_into4.q.out
new file mode 100644
index 0000000..f2e4bab
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/insert_into4.q.out
@@ -0,0 +1,436 @@
+PREHOOK: query: DROP TABLE insert_into4a
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE insert_into4a
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE insert_into4b
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE insert_into4b
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE insert_into4a (key int, value string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@insert_into4a
+POSTHOOK: query: CREATE TABLE insert_into4a (key int, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@insert_into4a
+PREHOOK: query: CREATE TABLE insert_into4b (key int, value string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@insert_into4b
+POSTHOOK: query: CREATE TABLE insert_into4b (key int, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@insert_into4b
+PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_into4a
+POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_into4a
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Limit
+                      Number of rows: 10
+                      Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        null sort order: 
+                        sort order: 
+                        Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+                        TopN Hash Memory Usage: 0.1
+                        value expressions: _col0 (type: string), _col1 (type: string)
+            Execution mode: vectorized, llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+                Limit
+                  Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.insert_into4a
+                    Select Operator
+                      expressions: _col0 (type: int), _col1 (type: string)
+                      outputColumnNames: key, value
+                      Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                        minReductionHashAggr: 0.9
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into4a
+
+  Stage: Stage-3
+    Stats Work
+      Basic Stats Work:
+      Column Stats Desc:
+          Columns: key, value
+          Column Types: int, string
+          Table: default.insert_into4a
+
+PREHOOK: query: INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_into4a
+POSTHOOK: query: INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_into4a
+POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4a
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into4a
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4a
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into4a
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-826625916
+PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_into4a
+POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_into4a
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Limit
+                      Number of rows: 10
+                      Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        null sort order: 
+                        sort order: 
+                        Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+                        TopN Hash Memory Usage: 0.1
+                        value expressions: _col0 (type: string), _col1 (type: string)
+            Execution mode: vectorized, llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+                Limit
+                  Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.insert_into4a
+                    Select Operator
+                      expressions: _col0 (type: int), _col1 (type: string)
+                      outputColumnNames: key, value
+                      Statistics: Num rows: 10 Data size: 950 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                        minReductionHashAggr: 0.9
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into4a
+
+  Stage: Stage-3
+    Stats Work
+      Basic Stats Work:
+      Column Stats Desc:
+          Columns: key, value
+          Column Types: int, string
+          Table: default.insert_into4a
+
+PREHOOK: query: INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_into4a
+POSTHOOK: query: INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_into4a
+POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4a
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into4a
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4a
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into4a
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-1653251832
+PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into4b SELECT * FROM insert_into4a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into4a
+PREHOOK: Output: default@insert_into4b
+POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into4b SELECT * FROM insert_into4a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into4a
+POSTHOOK: Output: default@insert_into4b
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: insert_into4a
+                  Statistics: Num rows: 20 Data size: 1900 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: int), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 20 Data size: 1900 Basic stats: COMPLETE Column stats: COMPLETE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 20 Data size: 1900 Basic stats: COMPLETE Column stats: COMPLETE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.insert_into4b
+                    Select Operator
+                      expressions: _col0 (type: int), _col1 (type: string)
+                      outputColumnNames: key, value
+                      Statistics: Num rows: 20 Data size: 1900 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                        minReductionHashAggr: 0.95
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into4b
+
+  Stage: Stage-3
+    Stats Work
+      Basic Stats Work:
+      Column Stats Desc:
+          Columns: key, value
+          Column Types: int, string
+          Table: default.insert_into4b
+
+PREHOOK: query: INSERT INTO TABLE insert_into4b SELECT * FROM insert_into4a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into4a
+PREHOOK: Output: default@insert_into4b
+POSTHOOK: query: INSERT INTO TABLE insert_into4b SELECT * FROM insert_into4a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into4a
+POSTHOOK: Output: default@insert_into4b
+POSTHOOK: Lineage: insert_into4b.key SIMPLE [(insert_into4a)insert_into4a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into4b.value SIMPLE [(insert_into4a)insert_into4a.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4b
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into4b
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4b
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into4b
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-1653251832
+PREHOOK: query: DROP TABLE insert_into4a
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@insert_into4a
+PREHOOK: Output: default@insert_into4a
+POSTHOOK: query: DROP TABLE insert_into4a
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@insert_into4a
+POSTHOOK: Output: default@insert_into4a
+PREHOOK: query: DROP TABLE insert_into4b
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@insert_into4b
+PREHOOK: Output: default@insert_into4b
+POSTHOOK: query: DROP TABLE insert_into4b
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@insert_into4b
+POSTHOOK: Output: default@insert_into4b
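
[What insert_into4.q checks, in brief; the statements are verbatim from the output above. INSERT INTO appends rather than replaces, and since SUM(HASH(c)) is linear in the rows, repeating the same insert exactly doubles the checksum: -826625916 after one insert, -1653251832 = 2 x -826625916 after two.]

-- Append semantics: the second identical INSERT INTO adds 10 more rows
-- rather than replacing the first 10.
INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10;  -- checksum: -826625916
INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10;  -- checksum doubles
-- Copying all 20 rows yields the same checksum as the source table.
INSERT INTO TABLE insert_into4b SELECT * FROM insert_into4a; -- checksum: -1653251832
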
diff --git a/ql/src/test/results/clientpositive/llap/insert_into5.q.out b/ql/src/test/results/clientpositive/llap/insert_into5.q.out
new file mode 100644
index 0000000..2573fd2
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/insert_into5.q.out
@@ -0,0 +1,550 @@
+PREHOOK: query: DROP TABLE insert_into5a
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE insert_into5a
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE insert_into5b
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE insert_into5b
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE insert_into5a (key int, value string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@insert_into5a
+POSTHOOK: query: CREATE TABLE insert_into5a (key int, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@insert_into5a
+PREHOOK: query: CREATE TABLE insert_into5b (key int, value string) PARTITIONED BY (ds string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@insert_into5b
+POSTHOOK: query: CREATE TABLE insert_into5b (key int, value string) PARTITIONED BY (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@insert_into5b
+PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5a SELECT 1, 'one' FROM src LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_into5a
+POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5a SELECT 1, 'one' FROM src LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_into5a
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: 1 (type: int), 'one' (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 45500 Basic stats: COMPLETE Column stats: COMPLETE
+                    Limit
+                      Number of rows: 10
+                      Statistics: Num rows: 10 Data size: 910 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        null sort order: 
+                        sort order: 
+                        Statistics: Num rows: 10 Data size: 910 Basic stats: COMPLETE Column stats: COMPLETE
+                        TopN Hash Memory Usage: 0.1
+                        value expressions: _col0 (type: int), _col1 (type: string)
+            Execution mode: vectorized, llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: int), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 10 Data size: 910 Basic stats: COMPLETE Column stats: COMPLETE
+                Limit
+                  Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 910 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 10 Data size: 910 Basic stats: COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.insert_into5a
+                  Select Operator
+                    expressions: _col0 (type: int), _col1 (type: string)
+                    outputColumnNames: key, value
+                    Statistics: Num rows: 10 Data size: 910 Basic stats: COMPLETE Column stats: COMPLETE
+                    Group By Operator
+                      aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                      minReductionHashAggr: 0.9
+                      mode: hash
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        null sort order: 
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into5a
+
+  Stage: Stage-3
+    Stats Work
+      Basic Stats Work:
+      Column Stats Desc:
+          Columns: key, value
+          Column Types: int, string
+          Table: default.insert_into5a
+
+PREHOOK: query: INSERT INTO TABLE insert_into5a SELECT 1, 'one' FROM src LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_into5a
+POSTHOOK: query: INSERT INTO TABLE insert_into5a SELECT 1, 'one' FROM src LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_into5a
+POSTHOOK: Lineage: insert_into5a.key SIMPLE []
+POSTHOOK: Lineage: insert_into5a.value SIMPLE []
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5a
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into5a
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5a
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into5a
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+481928560
+PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5a SELECT * FROM insert_into5a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into5a
+PREHOOK: Output: default@insert_into5a
+POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5a SELECT * FROM insert_into5a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into5a
+POSTHOOK: Output: default@insert_into5a
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: insert_into5a
+                  Statistics: Num rows: 10 Data size: 910 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: int), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 10 Data size: 910 Basic stats: COMPLETE Column stats: COMPLETE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 10 Data size: 910 Basic stats: COMPLETE Column stats: COMPLETE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.insert_into5a
+                    Select Operator
+                      expressions: _col0 (type: int), _col1 (type: string)
+                      outputColumnNames: key, value
+                      Statistics: Num rows: 10 Data size: 910 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                        minReductionHashAggr: 0.9
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 864 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into5a
+
+  Stage: Stage-3
+    Stats Work
+      Basic Stats Work:
+      Column Stats Desc:
+          Columns: key, value
+          Column Types: int, string
+          Table: default.insert_into5a
+
+PREHOOK: query: INSERT INTO TABLE insert_into5a SELECT * FROM insert_into5a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into5a
+PREHOOK: Output: default@insert_into5a
+POSTHOOK: query: INSERT INTO TABLE insert_into5a SELECT * FROM insert_into5a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into5a
+POSTHOOK: Output: default@insert_into5a
+POSTHOOK: Lineage: insert_into5a.key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5a.value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5a
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into5a
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5a
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into5a
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+963857120
+PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5b PARTITION (ds='1') 
+  SELECT * FROM insert_into5a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into5a
+PREHOOK: Output: default@insert_into5b@ds=1
+POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5b PARTITION (ds='1') 
+  SELECT * FROM insert_into5a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into5a
+POSTHOOK: Output: default@insert_into5b@ds=1
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: insert_into5a
+                  Statistics: Num rows: 20 Data size: 1820 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: int), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 20 Data size: 1820 Basic stats: COMPLETE Column stats: COMPLETE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 20 Data size: 1820 Basic stats: COMPLETE Column stats: COMPLETE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.insert_into5b
+                    Select Operator
+                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string)
+                      outputColumnNames: key, value, ds
+                      Statistics: Num rows: 20 Data size: 3520 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                        keys: ds (type: string)
+                        minReductionHashAggr: 0.95
+                        mode: hash
+                        outputColumnNames: _col0, _col1, _col2
+                        Statistics: Num rows: 1 Data size: 949 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: string)
+                          null sort order: z
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: string)
+                          Statistics: Num rows: 1 Data size: 949 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 1 Data size: 965 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col0 (type: string)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 1 Data size: 965 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 965 Basic stats: COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 1
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into5b
+
+  Stage: Stage-3
+    Stats Work
+      Basic Stats Work:
+      Column Stats Desc:
+          Columns: key, value
+          Column Types: int, string
+          Table: default.insert_into5b
+
+PREHOOK: query: INSERT INTO TABLE insert_into5b PARTITION (ds='1') SELECT * FROM insert_into5a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into5a
+PREHOOK: Output: default@insert_into5b@ds=1
+POSTHOOK: query: INSERT INTO TABLE insert_into5b PARTITION (ds='1') SELECT * FROM insert_into5a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into5a
+POSTHOOK: Output: default@insert_into5b@ds=1
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5b
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into5b
+PREHOOK: Input: default@insert_into5b@ds=1
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5b
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into5b
+POSTHOOK: Input: default@insert_into5b@ds=1
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-18626052920
+PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5b PARTITION (ds='1')
+  SELECT key, value FROM insert_into5b
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into5b
+PREHOOK: Input: default@insert_into5b@ds=1
+PREHOOK: Output: default@insert_into5b@ds=1
+POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5b PARTITION (ds='1')
+  SELECT key, value FROM insert_into5b
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into5b
+POSTHOOK: Input: default@insert_into5b@ds=1
+POSTHOOK: Output: default@insert_into5b@ds=1
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: insert_into5b
+                  Statistics: Num rows: 20 Data size: 1820 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: int), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 20 Data size: 1820 Basic stats: COMPLETE Column stats: COMPLETE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 20 Data size: 1820 Basic stats: COMPLETE Column stats: COMPLETE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.insert_into5b
+                    Select Operator
+                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string)
+                      outputColumnNames: key, value, ds
+                      Statistics: Num rows: 20 Data size: 3520 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                        keys: ds (type: string)
+                        minReductionHashAggr: 0.95
+                        mode: hash
+                        outputColumnNames: _col0, _col1, _col2
+                        Statistics: Num rows: 1 Data size: 949 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: string)
+                          null sort order: z
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: string)
+                          Statistics: Num rows: 1 Data size: 949 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 1 Data size: 965 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col0 (type: string)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 1 Data size: 965 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 965 Basic stats: COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 1
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into5b
+
+  Stage: Stage-3
+    Stats Work
+      Basic Stats Work:
+      Column Stats Desc:
+          Columns: key, value
+          Column Types: int, string
+          Table: default.insert_into5b
+
+PREHOOK: query: INSERT INTO TABLE insert_into5b PARTITION (ds='1') 
+  SELECT key, value FROM insert_into5b
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into5b
+PREHOOK: Input: default@insert_into5b@ds=1
+PREHOOK: Output: default@insert_into5b@ds=1
+POSTHOOK: query: INSERT INTO TABLE insert_into5b PARTITION (ds='1') 
+  SELECT key, value FROM insert_into5b
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into5b
+POSTHOOK: Input: default@insert_into5b@ds=1
+POSTHOOK: Output: default@insert_into5b@ds=1
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5b
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into5b
+PREHOOK: Input: default@insert_into5b@ds=1
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5b
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into5b
+POSTHOOK: Input: default@insert_into5b@ds=1
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-37252105840
+PREHOOK: query: DROP TABLE insert_into5a
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@insert_into5a
+PREHOOK: Output: default@insert_into5a
+POSTHOOK: query: DROP TABLE insert_into5a
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@insert_into5a
+POSTHOOK: Output: default@insert_into5a
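
The checksum queries in insert_into5.q.out above are the standard verification idiom in these golden files: TRANSFORM(*) USING 'tr \t _' streams each row through tr, which rewrites the tab-separated row serialization as a single underscore-joined string column c, and SUM(HASH(c)) folds the whole table into one number. Because INSERT INTO appends rather than overwrites, re-inserting the partition's own rows doubles every row's multiplicity, and the checksum doubles with it: 2 * -18626052920 = -37252105840, exactly the two outputs shown. A roughly equivalent formulation without a streaming script — concat_ws here is an illustrative substitute assuming the default tab-separated TRANSFORM serialization, not what the test actually runs:

    SELECT SUM(HASH(concat_ws('_', CAST(key AS STRING), value, ds)))
    FROM insert_into5b;
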
diff --git a/ql/src/test/results/clientpositive/llap/insert_into6.q.out b/ql/src/test/results/clientpositive/llap/insert_into6.q.out
new file mode 100644
index 0000000..fabcf41
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/insert_into6.q.out
@@ -0,0 +1,356 @@
+PREHOOK: query: DROP TABLE insert_into6a
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE insert_into6a
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE insert_into6b
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE insert_into6b
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE insert_into6a (key int, value string) PARTITIONED BY (ds string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@insert_into6a
+POSTHOOK: query: CREATE TABLE insert_into6a (key int, value string) PARTITIONED BY (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@insert_into6a
+PREHOOK: query: CREATE TABLE insert_into6b (key int, value string) PARTITIONED BY (ds string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@insert_into6b
+POSTHOOK: query: CREATE TABLE insert_into6b (key int, value string) PARTITIONED BY (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@insert_into6b
+PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into6a PARTITION (ds='1') 
+    SELECT * FROM src LIMIT 150
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_into6a@ds=1
+POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into6a PARTITION (ds='1') 
+    SELECT * FROM src LIMIT 150
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_into6a@ds=1
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Limit
+                      Number of rows: 150
+                      Statistics: Num rows: 150 Data size: 26700 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        null sort order: 
+                        sort order: 
+                        Statistics: Num rows: 150 Data size: 26700 Basic stats: COMPLETE Column stats: COMPLETE
+                        TopN Hash Memory Usage: 0.1
+                        value expressions: _col0 (type: string), _col1 (type: string)
+            Execution mode: vectorized, llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 150 Data size: 26700 Basic stats: COMPLETE Column stats: COMPLETE
+                Limit
+                  Number of rows: 150
+                  Statistics: Num rows: 150 Data size: 26700 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 150 Data size: 14250 Basic stats: COMPLETE Column stats: COMPLETE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.insert_into6a
+                    Select Operator
+                      expressions: _col0 (type: int), _col1 (type: string), '1' (type: string)
+                      outputColumnNames: key, value, ds
+                      Statistics: Num rows: 150 Data size: 27000 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                        keys: ds (type: string)
+                        minReductionHashAggr: 0.99
+                        mode: hash
+                        outputColumnNames: _col0, _col1, _col2
+                        Statistics: Num rows: 1 Data size: 949 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: string)
+                          null sort order: z
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: string)
+                          Statistics: Num rows: 1 Data size: 949 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 1 Data size: 965 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col0 (type: string)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 1 Data size: 965 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 965 Basic stats: COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 1
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into6a
+
+  Stage: Stage-3
+    Stats Work
+      Basic Stats Work:
+      Column Stats Desc:
+          Columns: key, value
+          Column Types: int, string
+          Table: default.insert_into6a
+
+PREHOOK: query: INSERT INTO TABLE insert_into6a PARTITION (ds='1') SELECT * FROM src LIMIT 150
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_into6a@ds=1
+POSTHOOK: query: INSERT INTO TABLE insert_into6a PARTITION (ds='1') SELECT * FROM src LIMIT 150
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_into6a@ds=1
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: INSERT INTO TABLE insert_into6a PARTITION (ds='2') SELECT * FROM src LIMIT 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_into6a@ds=2
+POSTHOOK: query: INSERT INTO TABLE insert_into6a PARTITION (ds='2') SELECT * FROM src LIMIT 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_into6a@ds=2
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into6a
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into6a
+PREHOOK: Input: default@insert_into6a@ds=1
+PREHOOK: Input: default@insert_into6a@ds=2
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into6a
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into6a
+POSTHOOK: Input: default@insert_into6a@ds=1
+POSTHOOK: Input: default@insert_into6a@ds=2
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-35226404960
+PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into6b PARTITION (ds) 
+    SELECT * FROM insert_into6a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into6a
+PREHOOK: Input: default@insert_into6a@ds=1
+PREHOOK: Input: default@insert_into6a@ds=2
+PREHOOK: Output: default@insert_into6b
+POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into6b PARTITION (ds) 
+    SELECT * FROM insert_into6a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into6a
+POSTHOOK: Input: default@insert_into6a@ds=1
+POSTHOOK: Input: default@insert_into6a@ds=2
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: insert_into6a
+                  Statistics: Num rows: 250 Data size: 69750 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: int), value (type: string), ds (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 250 Data size: 69750 Basic stats: COMPLETE Column stats: COMPLETE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 250 Data size: 69750 Basic stats: COMPLETE Column stats: COMPLETE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.insert_into6b
+                    Select Operator
+                      expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string)
+                      outputColumnNames: key, value, ds
+                      Statistics: Num rows: 250 Data size: 69750 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: compute_stats(key, 'hll'), compute_stats(value, 'hll')
+                        keys: ds (type: string)
+                        minReductionHashAggr: 0.99
+                        mode: hash
+                        outputColumnNames: _col0, _col1, _col2
+                        Statistics: Num rows: 2 Data size: 2096 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: string)
+                          null sort order: z
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: string)
+                          Statistics: Num rows: 2 Data size: 2096 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1)
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 2 Data size: 2128 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col0 (type: string)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 2 Data size: 2128 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 2 Data size: 2128 Basic stats: COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into6b
+
+  Stage: Stage-3
+    Stats Work
+      Basic Stats Work:
+      Column Stats Desc:
+          Columns: key, value
+          Column Types: int, string
+          Table: default.insert_into6b
+
+PREHOOK: query: INSERT INTO TABLE insert_into6b PARTITION (ds) SELECT * FROM insert_into6a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into6a
+PREHOOK: Input: default@insert_into6a@ds=1
+PREHOOK: Input: default@insert_into6a@ds=2
+PREHOOK: Output: default@insert_into6b
+POSTHOOK: query: INSERT INTO TABLE insert_into6b PARTITION (ds) SELECT * FROM insert_into6a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into6a
+POSTHOOK: Input: default@insert_into6a@ds=1
+POSTHOOK: Input: default@insert_into6a@ds=2
+POSTHOOK: Output: default@insert_into6b@ds=1
+POSTHOOK: Output: default@insert_into6b@ds=2
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=1).key SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=1).value SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=2).key SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=2).value SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into6b
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into6b
+PREHOOK: Input: default@insert_into6b@ds=1
+PREHOOK: Input: default@insert_into6b@ds=2
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into6b
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into6b
+POSTHOOK: Input: default@insert_into6b@ds=1
+POSTHOOK: Input: default@insert_into6b@ds=2
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+-35226404960
+PREHOOK: query: SHOW PARTITIONS insert_into6b
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@insert_into6b
+POSTHOOK: query: SHOW PARTITIONS insert_into6b
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@insert_into6b
+ds=1
+ds=2
+PREHOOK: query: DROP TABLE insert_into6a
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@insert_into6a
+PREHOOK: Output: default@insert_into6a
+POSTHOOK: query: DROP TABLE insert_into6a
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@insert_into6a
+POSTHOOK: Output: default@insert_into6a
+PREHOOK: query: DROP TABLE insert_into6b
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@insert_into6b
+PREHOOK: Output: default@insert_into6b
+POSTHOOK: query: DROP TABLE insert_into6b
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@insert_into6b
+POSTHOOK: Output: default@insert_into6b
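
insert_into6 exercises dynamic partition insert: PARTITION (ds) with no value tells Hive to take the partition key from the trailing select column, so a single statement fans rows out into both ds=1 and ds=2 (confirmed by the two POSTHOOK: Output lines and by SHOW PARTITIONS). The plan reflects this in the stats branch, where compute_stats is keyed by ds and the reducer estimate is Num rows: 2 — one stats row per target partition. A minimal standalone sketch; the SET line is an assumption, since dynamic partitioning with no static key requires nonstrict mode and the .q file presumably configures it outside this diff:

    -- assumed setting; not visible in this diff
    SET hive.exec.dynamic.partition.mode=nonstrict;
    INSERT INTO TABLE insert_into6b PARTITION (ds)
      SELECT key, value, ds FROM insert_into6a;
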
diff --git a/ql/src/test/results/clientpositive/load_binary_data.q.out b/ql/src/test/results/clientpositive/llap/load_binary_data.q.out
similarity index 96%
rename from ql/src/test/results/clientpositive/load_binary_data.q.out
rename to ql/src/test/results/clientpositive/llap/load_binary_data.q.out
index b0d5c63..d3c0d29 100644
Binary files a/ql/src/test/results/clientpositive/load_binary_data.q.out and b/ql/src/test/results/clientpositive/llap/load_binary_data.q.out differ
diff --git a/ql/src/test/results/clientpositive/macro_1.q.out b/ql/src/test/results/clientpositive/llap/macro_1.q.out
similarity index 91%
rename from ql/src/test/results/clientpositive/macro_1.q.out
rename to ql/src/test/results/clientpositive/llap/macro_1.q.out
index 28230f9..44d6cca 100644
--- a/ql/src/test/results/clientpositive/macro_1.q.out
+++ b/ql/src/test/results/clientpositive/llap/macro_1.q.out
@@ -38,13 +38,13 @@ PREHOOK: query: SELECT
 FROM macro_test
 PREHOOK: type: QUERY
 PREHOOK: Input: default@macro_test
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT
     CONCAT(STRING_LEN(x), ":", STRING_LEN_PLUS_ONE(x), ":", STRING_LEN_PLUS_TWO(x)) a
 FROM macro_test
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@macro_test
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 2:3:4
 1:2:3
 3:4:5
@@ -54,14 +54,14 @@ FROM macro_test
 sort by a
 PREHOOK: type: QUERY
 PREHOOK: Input: default@macro_test
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT
     CONCAT(STRING_LEN(x), ":", STRING_LEN_PLUS_ONE(x), ":", STRING_LEN_PLUS_TWO(x)) a
 FROM macro_test
 sort by a
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@macro_test
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1:2:3
 2:3:4
 3:4:5
@@ -71,14 +71,14 @@ FROM macro_test
 sort by a desc
 PREHOOK: type: QUERY
 PREHOOK: Input: default@macro_test
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT
     CONCAT(STRING_LEN(x), ":", STRING_LEN_PLUS_ONE(x), ":", STRING_LEN_PLUS_TWO(x)) a
 FROM macro_test
 sort by a desc
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@macro_test
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 3:4:5
 2:3:4
 1:2:3
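
This macro_1 hunk changes no query results; the only difference is that the MiniLlap driver surfaces the query output location as an explicit PREHOOK/POSTHOOK Output: hdfs://### HDFS PATH ### line where the old driver masked the whole line. The macros themselves are defined earlier in macro_1.q.out, outside this hunk; the following is a plausible reconstruction consistent with the 2:3:4 / 1:2:3 / 3:4:5 outputs, an assumption rather than a quote from the test:

    CREATE TEMPORARY MACRO STRING_LEN(x string) length(x);
    CREATE TEMPORARY MACRO STRING_LEN_PLUS_ONE(x string) length(x) + 1;
    CREATE TEMPORARY MACRO STRING_LEN_PLUS_TWO(x string) length(x) + 2;
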
diff --git a/ql/src/test/results/clientpositive/macro_duplicate.q.out b/ql/src/test/results/clientpositive/llap/macro_duplicate.q.out
similarity index 96%
rename from ql/src/test/results/clientpositive/macro_duplicate.q.out
rename to ql/src/test/results/clientpositive/llap/macro_duplicate.q.out
index 9598126..ccdf735 100644
--- a/ql/src/test/results/clientpositive/macro_duplicate.q.out
+++ b/ql/src/test/results/clientpositive/llap/macro_duplicate.q.out
@@ -47,10 +47,10 @@ POSTHOOK: Output: database:default
 PREHOOK: query: select math_square(a), math_square(b),factorial(a), factorial(b), math_add(a), math_add(b),int(c) from macro_testing order by int(c)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@macro_testing
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select math_square(a), math_square(b),factorial(a), factorial(b), math_add(a), math_add(b),int(c) from macro_testing order by int(c)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@macro_testing
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1	4	1	2	2	4	3
 16	25	24	120	8	10	6
diff --git a/ql/src/test/results/clientpositive/mapreduce3.q.out b/ql/src/test/results/clientpositive/llap/mapreduce3.q.out
similarity index 69%
rename from ql/src/test/results/clientpositive/mapreduce3.q.out
rename to ql/src/test/results/clientpositive/llap/mapreduce3.q.out
index 9c0157c..f691d40 100644
--- a/ql/src/test/results/clientpositive/mapreduce3.q.out
+++ b/ql/src/test/results/clientpositive/llap/mapreduce3.q.out
@@ -26,63 +26,89 @@ POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1_n23
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0, Stage-3
-  Stage-3 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
 
 STAGE PLANS:
   Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: string), UDFToInteger((key / 10)) (type: int), UDFToInteger((key % 10)) (type: int), value (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
-              Transform Operator
-                command: cat
-                output info:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  key expressions: _col3 (type: string), _col0 (type: string)
-                  null sort order: zz
-                  sort order: ++
-                  Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
-                  value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: UDFToInteger(VALUE._col0) (type: int), UDFToInteger(VALUE._col1) (type: int), UDFToInteger(VALUE._col2) (type: int), VALUE._col3 (type: string)
-          outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 500 Data size: 51500 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 500 Data size: 51500 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.dest1_n23
-          Select Operator
-            expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int), _col3 (type: string)
-            outputColumnNames: key, ten, one, value
-            Statistics: Num rows: 500 Data size: 51500 Basic stats: COMPLETE Column stats: COMPLETE
-            Group By Operator
-              aggregations: compute_stats(key, 'hll'), compute_stats(ten, 'hll'), compute_stats(one, 'hll'), compute_stats(value, 'hll')
-              minReductionHashAggr: 0.99
-              mode: hash
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 1 Data size: 1712 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: string), UDFToInteger((key / 10)) (type: int), UDFToInteger((key % 10)) (type: int), value (type: string)
+                    outputColumnNames: _col0, _col1, _col2, _col3
+                    Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col3 (type: string), _col0 (type: string)
+                        null sort order: zz
+                        sort order: ++
+                        Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: UDFToInteger(VALUE._col0) (type: int), UDFToInteger(VALUE._col1) (type: int), UDFToInteger(VALUE._col2) (type: int), VALUE._col3 (type: string)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 500 Data size: 51500 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 500 Data size: 51500 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.dest1_n23
+                Select Operator
+                  expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int), _col3 (type: string)
+                  outputColumnNames: key, ten, one, value
+                  Statistics: Num rows: 500 Data size: 51500 Basic stats: COMPLETE Column stats: COMPLETE
+                  Group By Operator
+                    aggregations: compute_stats(key, 'hll'), compute_stats(ten, 'hll'), compute_stats(one, 'hll'), compute_stats(value, 'hll')
+                    minReductionHashAggr: 0.99
+                    mode: hash
+                    outputColumnNames: _col0, _col1, _col2, _col3
+                    Statistics: Num rows: 1 Data size: 1712 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      null sort order: 
+                      sort order: 
+                      Statistics: Num rows: 1 Data size: 1712 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col2 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col3 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1), compute_stats(VALUE._col2), compute_stats(VALUE._col3)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 1 Data size: 1760 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 1760 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Dependency Collection
 
   Stage: Stage-0
     Move Operator
@@ -94,7 +120,7 @@ STAGE PLANS:
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1_n23
 
-  Stage: Stage-2
+  Stage: Stage-3
     Stats Work
       Basic Stats Work:
       Column Stats Desc:
@@ -102,30 +128,6 @@ STAGE PLANS:
           Column Types: int, int, int, string
           Table: default.dest1_n23
 
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              null sort order: 
-              sort order: 
-              Statistics: Num rows: 1 Data size: 1712 Basic stats: COMPLETE Column stats: COMPLETE
-              value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col2 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col3 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1), compute_stats(VALUE._col2), compute_stats(VALUE._col3)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 1 Data size: 1760 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 1760 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
 PREHOOK: query: FROM src
 INSERT OVERWRITE TABLE dest1_n23
 MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
@@ -149,11 +151,11 @@ POSTHOOK: Lineage: dest1_n23.value SCRIPT [(src)src.FieldSchema(name:key, type:s
 PREHOOK: query: SELECT dest1_n23.* FROM dest1_n23
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1_n23
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT dest1_n23.* FROM dest1_n23
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1_n23
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0	0	0	val_0
 0	0	0	val_0
 0	0	0	val_0
diff --git a/ql/src/test/results/clientpositive/mapreduce4.q.out b/ql/src/test/results/clientpositive/llap/mapreduce4.q.out
similarity index 69%
rename from ql/src/test/results/clientpositive/mapreduce4.q.out
rename to ql/src/test/results/clientpositive/llap/mapreduce4.q.out
index a606df0..12882a8 100644
--- a/ql/src/test/results/clientpositive/mapreduce4.q.out
+++ b/ql/src/test/results/clientpositive/llap/mapreduce4.q.out
@@ -28,64 +28,90 @@ POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1_n93
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0, Stage-3
-  Stage-3 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
 
 STAGE PLANS:
   Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: string), UDFToInteger((key / 10)) (type: int), UDFToInteger((key % 10)) (type: int), value (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
-              Transform Operator
-                command: cat
-                output info:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  key expressions: _col1 (type: string), _col2 (type: string)
-                  null sort order: zz
-                  sort order: -+
-                  Map-reduce partition columns: _col3 (type: string), _col0 (type: string)
-                  Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
-                  value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: UDFToInteger(VALUE._col0) (type: int), UDFToInteger(VALUE._col1) (type: int), UDFToInteger(VALUE._col2) (type: int), VALUE._col3 (type: string)
-          outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 500 Data size: 51500 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 500 Data size: 51500 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.dest1_n93
-          Select Operator
-            expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int), _col3 (type: string)
-            outputColumnNames: key, ten, one, value
-            Statistics: Num rows: 500 Data size: 51500 Basic stats: COMPLETE Column stats: COMPLETE
-            Group By Operator
-              aggregations: compute_stats(key, 'hll'), compute_stats(ten, 'hll'), compute_stats(one, 'hll'), compute_stats(value, 'hll')
-              minReductionHashAggr: 0.99
-              mode: hash
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 1 Data size: 1712 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: string), UDFToInteger((key / 10)) (type: int), UDFToInteger((key % 10)) (type: int), value (type: string)
+                    outputColumnNames: _col0, _col1, _col2, _col3
+                    Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col1 (type: string), _col2 (type: string)
+                        null sort order: zz
+                        sort order: -+
+                        Map-reduce partition columns: _col3 (type: string), _col0 (type: string)
+                        Statistics: Num rows: 500 Data size: 93000 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: UDFToInteger(VALUE._col0) (type: int), UDFToInteger(VALUE._col1) (type: int), UDFToInteger(VALUE._col2) (type: int), VALUE._col3 (type: string)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 500 Data size: 51500 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 500 Data size: 51500 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.dest1_n93
+                Select Operator
+                  expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int), _col3 (type: string)
+                  outputColumnNames: key, ten, one, value
+                  Statistics: Num rows: 500 Data size: 51500 Basic stats: COMPLETE Column stats: COMPLETE
+                  Group By Operator
+                    aggregations: compute_stats(key, 'hll'), compute_stats(ten, 'hll'), compute_stats(one, 'hll'), compute_stats(value, 'hll')
+                    minReductionHashAggr: 0.99
+                    mode: hash
+                    outputColumnNames: _col0, _col1, _col2, _col3
+                    Statistics: Num rows: 1 Data size: 1712 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      null sort order: 
+                      sort order: 
+                      Statistics: Num rows: 1 Data size: 1712 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col2 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col3 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1), compute_stats(VALUE._col2), compute_stats(VALUE._col3)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 1 Data size: 1760 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 1760 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Dependency Collection
 
   Stage: Stage-0
     Move Operator
@@ -97,7 +123,7 @@ STAGE PLANS:
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1_n93
 
-  Stage: Stage-2
+  Stage: Stage-3
     Stats Work
       Basic Stats Work:
       Column Stats Desc:
@@ -105,30 +131,6 @@ STAGE PLANS:
           Column Types: int, int, int, string
           Table: default.dest1_n93
 
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              null sort order: 
-              sort order: 
-              Statistics: Num rows: 1 Data size: 1712 Basic stats: COMPLETE Column stats: COMPLETE
-              value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col2 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col3 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1), compute_stats(VALUE._col2), compute_stats(VALUE._col3)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 1 Data size: 1760 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 1760 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
 PREHOOK: query: FROM src
 INSERT OVERWRITE TABLE dest1_n93
 MAP src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
@@ -154,11 +156,11 @@ POSTHOOK: Lineage: dest1_n93.value SCRIPT [(src)src.FieldSchema(name:key, type:s
 PREHOOK: query: SELECT dest1_n93.* FROM dest1_n93
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1_n93
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT dest1_n93.* FROM dest1_n93
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1_n93
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 90	9	0	val_90
 90	9	0	val_90
 90	9	0	val_90
diff --git a/ql/src/test/results/clientpositive/mapreduce7.q.out b/ql/src/test/results/clientpositive/llap/mapreduce7.q.out
similarity index 76%
rename from ql/src/test/results/clientpositive/mapreduce7.q.out
rename to ql/src/test/results/clientpositive/llap/mapreduce7.q.out
index ab369e6..10d42ce 100644
--- a/ql/src/test/results/clientpositive/mapreduce7.q.out
+++ b/ql/src/test/results/clientpositive/llap/mapreduce7.q.out
@@ -26,63 +26,89 @@ POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1_n37
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0, Stage-3
-  Stage-3 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
 
 STAGE PLANS:
   Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: string), value (type: string), key (type: string), UDFToInteger((key / 10)) (type: int), UDFToInteger((key % 10)) (type: int), value (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 500 Data size: 182000 Basic stats: COMPLETE Column stats: COMPLETE
-              Transform Operator
-                command: cat
-                output info:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                Statistics: Num rows: 500 Data size: 182000 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  key expressions: _col5 (type: string), _col2 (type: string)
-                  null sort order: zz
-                  sort order: ++
-                  Statistics: Num rows: 500 Data size: 182000 Basic stats: COMPLETE Column stats: COMPLETE
-                  value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), UDFToInteger(VALUE._col2) (type: int), UDFToInteger(VALUE._col3) (type: int), UDFToInteger(VALUE._col4) (type: int), VALUE._col5 (type: string)
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 500 Data size: 140500 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 500 Data size: 140500 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.dest1_n37
-          Select Operator
-            expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: string)
-            outputColumnNames: k, v, key, ten, one, value
-            Statistics: Num rows: 500 Data size: 140500 Basic stats: COMPLETE Column stats: COMPLETE
-            Group By Operator
-              aggregations: compute_stats(k, 'hll'), compute_stats(v, 'hll'), compute_stats(key, 'hll'), compute_stats(ten, 'hll'), compute_stats(one, 'hll'), compute_stats(value, 'hll')
-              minReductionHashAggr: 0.99
-              mode: hash
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 1 Data size: 2592 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: string), value (type: string), key (type: string), UDFToInteger((key / 10)) (type: int), UDFToInteger((key % 10)) (type: int), value (type: string)
+                    outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                    Statistics: Num rows: 500 Data size: 182000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 500 Data size: 182000 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col5 (type: string), _col2 (type: string)
+                        null sort order: zz
+                        sort order: ++
+                        Statistics: Num rows: 500 Data size: 182000 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string)
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), UDFToInteger(VALUE._col2) (type: int), UDFToInteger(VALUE._col3) (type: int), UDFToInteger(VALUE._col4) (type: int), VALUE._col5 (type: string)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                Statistics: Num rows: 500 Data size: 140500 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 500 Data size: 140500 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.dest1_n37
+                Select Operator
+                  expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: string)
+                  outputColumnNames: k, v, key, ten, one, value
+                  Statistics: Num rows: 500 Data size: 140500 Basic stats: COMPLETE Column stats: COMPLETE
+                  Group By Operator
+                    aggregations: compute_stats(k, 'hll'), compute_stats(v, 'hll'), compute_stats(key, 'hll'), compute_stats(ten, 'hll'), compute_stats(one, 'hll'), compute_stats(value, 'hll')
+                    minReductionHashAggr: 0.99
+                    mode: hash
+                    outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                    Statistics: Num rows: 1 Data size: 2592 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      null sort order: 
+                      sort order: 
+                      Statistics: Num rows: 1 Data size: 2592 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>), _col2 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col3 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col4 (ty [...]
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1), compute_stats(VALUE._col2), compute_stats(VALUE._col3), compute_stats(VALUE._col4), compute_stats(VALUE._col5)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                Statistics: Num rows: 1 Data size: 2640 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 2640 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Dependency Collection
 
   Stage: Stage-0
     Move Operator
@@ -94,7 +120,7 @@ STAGE PLANS:
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.dest1_n37
 
-  Stage: Stage-2
+  Stage: Stage-3
     Stats Work
       Basic Stats Work:
       Column Stats Desc:
@@ -102,30 +128,6 @@ STAGE PLANS:
           Column Types: string, string, int, int, int, string
           Table: default.dest1_n37
 
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              null sort order: 
-              sort order: 
-              Statistics: Num rows: 1 Data size: 2592 Basic stats: COMPLETE Column stats: COMPLETE
-              value expressions: _col0 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>), _col2 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col3 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col4 (type: stru [...]
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: compute_stats(VALUE._col0), compute_stats(VALUE._col1), compute_stats(VALUE._col2), compute_stats(VALUE._col3), compute_stats(VALUE._col4), compute_stats(VALUE._col5)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 1 Data size: 2640 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 1 Data size: 2640 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
 PREHOOK: query: FROM src
 INSERT OVERWRITE TABLE dest1_n37
 MAP src.*, src.key, CAST(src.key / 10 AS INT), CAST(src.key % 10 AS INT), src.value
@@ -151,11 +153,11 @@ POSTHOOK: Lineage: dest1_n37.value SCRIPT [(src)src.FieldSchema(name:key, type:s
 PREHOOK: query: SELECT dest1_n37.* FROM dest1_n37
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1_n37
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT dest1_n37.* FROM dest1_n37
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1_n37
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0	val_0	0	0	0	val_0
 0	val_0	0	0	0	val_0
 0	val_0	0	0	0	val_0
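With their golden files now under clientpositive/llap, these cases run against the LLAP-backed driver instead of TestCliDriver. Assuming the standard itests qtest workflow, a single migrated case can be re-verified with something like: cd itests/qtest && mvn test -Dtest=TestMiniLlapCliDriver -Dqfile=mapreduce7.q
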
diff --git a/ql/src/test/results/clientpositive/mapreduce8.q.out b/ql/src/test/results/clientpositive/llap/mapreduce8.q.out
similarity index 75%
rename from ql/src/test/results/clientpositive/mapreduce8.q.out
rename to ql/src/test/results/clientpositive/llap/mapreduce8.q.out
index d00ede8..1a38974 100644
--- a/ql/src/test/results/clientpositive/mapreduce8.q.out
+++ b/ql/src/test/results/clientpositive/llap/mapreduce8.q.out
@@ -28,64 +28,90 @@ POSTHOOK: Input: default@src
 POSTHOOK: Output: default@dest1_n158
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0, Stage-3
-  Stage-3 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
 
 STAGE PLANS:
   Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: key (type: string), value (type: string), key (type: string), UDFToInteger((key / 10)) (type: int), UDFToInteger((key % 10)) (type: int), value (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 500 Data size: 182000 Basic stats: COMPLETE Column stats: COMPLETE
-              Transform Operator
-                command: cat
-                output info:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                Statistics: Num rows: 500 Data size: 182000 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  key expressions: _col5 (type: string), _col2 (type: string)
-                  null sort order: zz
-                  sort order: ++
-                  Map-reduce partition columns: rand(3) (type: double)
-                  Statistics: Num rows: 500 Data size: 182000 Basic stats: COMPLETE Column stats: COMPLETE
-                  value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), UDFToInteger(VALUE._col2) (type: int), UDFToInteger(VALUE._col3) (type: int), UDFToInteger(VALUE._col4) (type: int), VALUE._col5 (type: string)
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 500 Data size: 140500 Basic stats: COMPLETE Column stats: COMPLETE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 500 Data size: 140500 Basic stats: COMPLETE Column stats: COMPLETE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.dest1_n158
-          Select Operator
-            expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: string)
-            outputColumnNames: k, v, key, ten, one, value
-            Statistics: Num rows: 500 Data size: 140500 Basic stats: COMPLETE Column stats: COMPLETE
-            Group By Operator
-              aggregations: compute_stats(k, 'hll'), compute_stats(v, 'hll'), compute_stats(key, 'hll'), compute_stats(ten, 'hll'), compute_stats(one, 'hll'), compute_stats(value, 'hll')
-              minReductionHashAggr: 0.99
-              mode: hash
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 1 Data size: 2592 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
... 9415 lines suppressed ...
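
The one substantive planner difference between mapreduce7 and mapreduce8 above is the extra "Map-reduce partition columns: rand(3)" line in mapreduce8's Reduce Output Operator: distributing by a random expression routes rows to reducers independently of the sort keys, which then only order rows within each reducer. An illustrative query of that shape (column names are assumptions, not taken from the .q files):

    -- DISTRIBUTE BY a random expression decouples row routing from ordering;
    -- the compiled plan carries "Map-reduce partition columns: rand(3)"
    -- on its Reduce Output Operator, as in mapreduce8.q.out above.
    SELECT key, value FROM src DISTRIBUTE BY rand(3) SORT BY value, key;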