Posted to commits@hive.apache.org by gu...@apache.org on 2017/02/03 21:50:21 UTC

[08/51] [partial] hive git commit: HIVE-15790: Remove unused beeline golden files (Gunther Hagleitner, reviewed by Sergey Shelukhin)

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/input12.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/input12.q.out b/ql/src/test/results/beelinepositive/input12.q.out
deleted file mode 100644
index 457a836..0000000
--- a/ql/src/test/results/beelinepositive/input12.q.out
+++ /dev/null
@@ -1,814 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/input12.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/input12.q
->>>  set mapred.job.tracker=does.notexist.com:666;
-No rows affected 
->>>  set hive.exec.mode.local.auto=true;
-No rows affected 
->>>  
->>>  CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
-No rows affected 
->>>  CREATE TABLE dest2(key INT, value STRING) STORED AS TEXTFILE;
-No rows affected 
->>>  CREATE TABLE dest3(key INT) PARTITIONED BY(ds STRING, hr STRING) STORED AS TEXTFILE;
-No rows affected 
->>>  
->>>  EXPLAIN 
-FROM src 
-INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100 
-INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200 
-INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME src)))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest2))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL src) key) 100) (< (. (TOK_TABLE_OR_COL src) key) 200)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest3) (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr '12')))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key))) (TOK_WHERE (>= (. (TOK_TABLE_OR_COL src) key) 200))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-3 is a root stage'
-'  Stage-9 depends on stages: Stage-3 , consists of Stage-6, Stage-5, Stage-7'
-'  Stage-6'
-'  Stage-0 depends on stages: Stage-6, Stage-5, Stage-8'
-'  Stage-4 depends on stages: Stage-0'
-'  Stage-5'
-'  Stage-7'
-'  Stage-8 depends on stages: Stage-7'
-'  Stage-15 depends on stages: Stage-3 , consists of Stage-12, Stage-11, Stage-13'
-'  Stage-12'
-'  Stage-1 depends on stages: Stage-12, Stage-11, Stage-14'
-'  Stage-10 depends on stages: Stage-1'
-'  Stage-11'
-'  Stage-13'
-'  Stage-14 depends on stages: Stage-13'
-'  Stage-21 depends on stages: Stage-3 , consists of Stage-18, Stage-17, Stage-19'
-'  Stage-18'
-'  Stage-2 depends on stages: Stage-18, Stage-17, Stage-20'
-'  Stage-16 depends on stages: Stage-2'
-'  Stage-17'
-'  Stage-19'
-'  Stage-20 depends on stages: Stage-19'
-''
-'STAGE PLANS:'
-'  Stage: Stage-3'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (key < 100.0)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: key'
-'                      type: string'
-'                      expr: value'
-'                      type: string'
-'                outputColumnNames: _col0, _col1'
-'                Select Operator'
-'                  expressions:'
-'                        expr: UDFToInteger(_col0)'
-'                        type: int'
-'                        expr: _col1'
-'                        type: string'
-'                  outputColumnNames: _col0, _col1'
-'                  File Output Operator'
-'                    compressed: false'
-'                    GlobalTableId: 1'
-'                    table:'
-'                        input format: org.apache.hadoop.mapred.TextInputFormat'
-'                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                        name: input12.dest1'
-'            Filter Operator'
-'              predicate:'
-'                  expr: ((key >= 100.0) and (key < 200.0))'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: key'
-'                      type: string'
-'                      expr: value'
-'                      type: string'
-'                outputColumnNames: _col0, _col1'
-'                Select Operator'
-'                  expressions:'
-'                        expr: UDFToInteger(_col0)'
-'                        type: int'
-'                        expr: _col1'
-'                        type: string'
-'                  outputColumnNames: _col0, _col1'
-'                  File Output Operator'
-'                    compressed: false'
-'                    GlobalTableId: 2'
-'                    table:'
-'                        input format: org.apache.hadoop.mapred.TextInputFormat'
-'                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                        name: input12.dest2'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (key >= 200.0)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: key'
-'                      type: string'
-'                outputColumnNames: _col0'
-'                Select Operator'
-'                  expressions:'
-'                        expr: UDFToInteger(_col0)'
-'                        type: int'
-'                  outputColumnNames: _col0'
-'                  File Output Operator'
-'                    compressed: false'
-'                    GlobalTableId: 3'
-'                    table:'
-'                        input format: org.apache.hadoop.mapred.TextInputFormat'
-'                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                        name: input12.dest3'
-''
-'  Stage: Stage-9'
-'    Conditional Operator'
-''
-'  Stage: Stage-6'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: pfile:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-0'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: input12.dest1'
-''
-'  Stage: Stage-4'
-'    Stats-Aggr Operator'
-''
-'  Stage: Stage-5'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        pfile:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                  name: input12.dest1'
-''
-'  Stage: Stage-7'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        pfile:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                  name: input12.dest1'
-''
-'  Stage: Stage-8'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: pfile:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-15'
-'    Conditional Operator'
-''
-'  Stage: Stage-12'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: pfile:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-1'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: input12.dest2'
-''
-'  Stage: Stage-10'
-'    Stats-Aggr Operator'
-''
-'  Stage: Stage-11'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        pfile:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                  name: input12.dest2'
-''
-'  Stage: Stage-13'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        pfile:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                  name: input12.dest2'
-''
-'  Stage: Stage-14'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: pfile:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-21'
-'    Conditional Operator'
-''
-'  Stage: Stage-18'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: pfile:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-2'
-'    Move Operator'
-'      tables:'
-'          partition:'
-'            ds 2008-04-08'
-'            hr 12'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: input12.dest3'
-''
-'  Stage: Stage-16'
-'    Stats-Aggr Operator'
-''
-'  Stage: Stage-17'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        pfile:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                  name: input12.dest3'
-''
-'  Stage: Stage-19'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        pfile:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                  name: input12.dest3'
-''
-'  Stage: Stage-20'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: pfile:!!{hive.exec.scratchdir}!!'
-''
-''
-275 rows selected 
->>>  
->>>  FROM src 
-INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100 
-INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200 
-INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200;
-'_col0'
-No rows selected 
->>>  
->>>  SELECT dest1.* FROM dest1;
-'key','value'
-'86','val_86'
-'27','val_27'
-'98','val_98'
-'66','val_66'
-'37','val_37'
-'15','val_15'
-'82','val_82'
-'17','val_17'
-'0','val_0'
-'57','val_57'
-'20','val_20'
-'92','val_92'
-'47','val_47'
-'72','val_72'
-'4','val_4'
-'35','val_35'
-'54','val_54'
-'51','val_51'
-'65','val_65'
-'83','val_83'
-'12','val_12'
-'67','val_67'
-'84','val_84'
-'58','val_58'
-'8','val_8'
-'24','val_24'
-'42','val_42'
-'0','val_0'
-'96','val_96'
-'26','val_26'
-'51','val_51'
-'43','val_43'
-'95','val_95'
-'98','val_98'
-'85','val_85'
-'77','val_77'
-'0','val_0'
-'87','val_87'
-'15','val_15'
-'72','val_72'
-'90','val_90'
-'19','val_19'
-'10','val_10'
-'5','val_5'
-'58','val_58'
-'35','val_35'
-'95','val_95'
-'11','val_11'
-'34','val_34'
-'42','val_42'
-'78','val_78'
-'76','val_76'
-'41','val_41'
-'30','val_30'
-'64','val_64'
-'76','val_76'
-'74','val_74'
-'69','val_69'
-'33','val_33'
-'70','val_70'
-'5','val_5'
-'2','val_2'
-'35','val_35'
-'80','val_80'
-'44','val_44'
-'53','val_53'
-'90','val_90'
-'12','val_12'
-'5','val_5'
-'70','val_70'
-'24','val_24'
-'70','val_70'
-'83','val_83'
-'26','val_26'
-'67','val_67'
-'18','val_18'
-'9','val_9'
-'18','val_18'
-'97','val_97'
-'84','val_84'
-'28','val_28'
-'37','val_37'
-'90','val_90'
-'97','val_97'
-84 rows selected 
->>>  SELECT dest2.* FROM dest2;
-'key','value'
-'165','val_165'
-'193','val_193'
-'150','val_150'
-'128','val_128'
-'146','val_146'
-'152','val_152'
-'145','val_145'
-'166','val_166'
-'153','val_153'
-'193','val_193'
-'174','val_174'
-'199','val_199'
-'174','val_174'
-'162','val_162'
-'167','val_167'
-'195','val_195'
-'113','val_113'
-'155','val_155'
-'128','val_128'
-'149','val_149'
-'129','val_129'
-'170','val_170'
-'157','val_157'
-'111','val_111'
-'169','val_169'
-'125','val_125'
-'192','val_192'
-'187','val_187'
-'176','val_176'
-'138','val_138'
-'103','val_103'
-'176','val_176'
-'137','val_137'
-'180','val_180'
-'181','val_181'
-'138','val_138'
-'179','val_179'
-'172','val_172'
-'129','val_129'
-'158','val_158'
-'119','val_119'
-'197','val_197'
-'100','val_100'
-'199','val_199'
-'191','val_191'
-'165','val_165'
-'120','val_120'
-'131','val_131'
-'156','val_156'
-'196','val_196'
-'197','val_197'
-'187','val_187'
-'137','val_137'
-'169','val_169'
-'179','val_179'
-'118','val_118'
-'134','val_134'
-'138','val_138'
-'118','val_118'
-'177','val_177'
-'168','val_168'
-'143','val_143'
-'160','val_160'
-'195','val_195'
-'119','val_119'
-'149','val_149'
-'138','val_138'
-'103','val_103'
-'113','val_113'
-'167','val_167'
-'116','val_116'
-'191','val_191'
-'128','val_128'
-'193','val_193'
-'104','val_104'
-'175','val_175'
-'105','val_105'
-'190','val_190'
-'114','val_114'
-'164','val_164'
-'125','val_125'
-'164','val_164'
-'187','val_187'
-'104','val_104'
-'163','val_163'
-'119','val_119'
-'199','val_199'
-'120','val_120'
-'169','val_169'
-'178','val_178'
-'136','val_136'
-'172','val_172'
-'133','val_133'
-'175','val_175'
-'189','val_189'
-'134','val_134'
-'100','val_100'
-'146','val_146'
-'186','val_186'
-'167','val_167'
-'183','val_183'
-'152','val_152'
-'194','val_194'
-'126','val_126'
-'169','val_169'
-105 rows selected 
->>>  SELECT dest3.* FROM dest3;
-'key','ds','hr'
-'238','2008-04-08','12'
-'311','2008-04-08','12'
-'409','2008-04-08','12'
-'255','2008-04-08','12'
-'278','2008-04-08','12'
-'484','2008-04-08','12'
-'265','2008-04-08','12'
-'401','2008-04-08','12'
-'273','2008-04-08','12'
-'224','2008-04-08','12'
-'369','2008-04-08','12'
-'213','2008-04-08','12'
-'406','2008-04-08','12'
-'429','2008-04-08','12'
-'374','2008-04-08','12'
-'469','2008-04-08','12'
-'495','2008-04-08','12'
-'327','2008-04-08','12'
-'281','2008-04-08','12'
-'277','2008-04-08','12'
-'209','2008-04-08','12'
-'403','2008-04-08','12'
-'417','2008-04-08','12'
-'430','2008-04-08','12'
-'252','2008-04-08','12'
-'292','2008-04-08','12'
-'219','2008-04-08','12'
-'287','2008-04-08','12'
-'338','2008-04-08','12'
-'446','2008-04-08','12'
-'459','2008-04-08','12'
-'394','2008-04-08','12'
-'237','2008-04-08','12'
-'482','2008-04-08','12'
-'413','2008-04-08','12'
-'494','2008-04-08','12'
-'207','2008-04-08','12'
-'466','2008-04-08','12'
-'208','2008-04-08','12'
-'399','2008-04-08','12'
-'396','2008-04-08','12'
-'247','2008-04-08','12'
-'417','2008-04-08','12'
-'489','2008-04-08','12'
-'377','2008-04-08','12'
-'397','2008-04-08','12'
-'309','2008-04-08','12'
-'365','2008-04-08','12'
-'266','2008-04-08','12'
-'439','2008-04-08','12'
-'342','2008-04-08','12'
-'367','2008-04-08','12'
-'325','2008-04-08','12'
-'475','2008-04-08','12'
-'203','2008-04-08','12'
-'339','2008-04-08','12'
-'455','2008-04-08','12'
-'311','2008-04-08','12'
-'316','2008-04-08','12'
-'302','2008-04-08','12'
-'205','2008-04-08','12'
-'438','2008-04-08','12'
-'345','2008-04-08','12'
-'489','2008-04-08','12'
-'378','2008-04-08','12'
-'221','2008-04-08','12'
-'280','2008-04-08','12'
-'427','2008-04-08','12'
-'277','2008-04-08','12'
-'208','2008-04-08','12'
-'356','2008-04-08','12'
-'399','2008-04-08','12'
-'382','2008-04-08','12'
-'498','2008-04-08','12'
-'386','2008-04-08','12'
-'437','2008-04-08','12'
-'469','2008-04-08','12'
-'286','2008-04-08','12'
-'459','2008-04-08','12'
-'239','2008-04-08','12'
-'213','2008-04-08','12'
-'216','2008-04-08','12'
-'430','2008-04-08','12'
-'278','2008-04-08','12'
-'289','2008-04-08','12'
-'221','2008-04-08','12'
-'318','2008-04-08','12'
-'332','2008-04-08','12'
-'311','2008-04-08','12'
-'275','2008-04-08','12'
-'241','2008-04-08','12'
-'333','2008-04-08','12'
-'284','2008-04-08','12'
-'230','2008-04-08','12'
-'260','2008-04-08','12'
-'404','2008-04-08','12'
-'384','2008-04-08','12'
-'489','2008-04-08','12'
-'353','2008-04-08','12'
-'373','2008-04-08','12'
-'272','2008-04-08','12'
-'217','2008-04-08','12'
-'348','2008-04-08','12'
-'466','2008-04-08','12'
-'411','2008-04-08','12'
-'230','2008-04-08','12'
-'208','2008-04-08','12'
-'348','2008-04-08','12'
-'463','2008-04-08','12'
-'431','2008-04-08','12'
-'496','2008-04-08','12'
-'322','2008-04-08','12'
-'468','2008-04-08','12'
-'393','2008-04-08','12'
-'454','2008-04-08','12'
-'298','2008-04-08','12'
-'418','2008-04-08','12'
-'327','2008-04-08','12'
-'230','2008-04-08','12'
-'205','2008-04-08','12'
-'404','2008-04-08','12'
-'436','2008-04-08','12'
-'469','2008-04-08','12'
-'468','2008-04-08','12'
-'308','2008-04-08','12'
-'288','2008-04-08','12'
-'481','2008-04-08','12'
-'457','2008-04-08','12'
-'282','2008-04-08','12'
-'318','2008-04-08','12'
-'318','2008-04-08','12'
-'409','2008-04-08','12'
-'470','2008-04-08','12'
-'369','2008-04-08','12'
-'316','2008-04-08','12'
-'413','2008-04-08','12'
-'490','2008-04-08','12'
-'364','2008-04-08','12'
-'395','2008-04-08','12'
-'282','2008-04-08','12'
-'238','2008-04-08','12'
-'419','2008-04-08','12'
-'307','2008-04-08','12'
-'435','2008-04-08','12'
-'277','2008-04-08','12'
-'273','2008-04-08','12'
-'306','2008-04-08','12'
-'224','2008-04-08','12'
-'309','2008-04-08','12'
-'389','2008-04-08','12'
-'327','2008-04-08','12'
-'242','2008-04-08','12'
-'369','2008-04-08','12'
-'392','2008-04-08','12'
-'272','2008-04-08','12'
-'331','2008-04-08','12'
-'401','2008-04-08','12'
-'242','2008-04-08','12'
-'452','2008-04-08','12'
-'226','2008-04-08','12'
-'497','2008-04-08','12'
-'402','2008-04-08','12'
-'396','2008-04-08','12'
-'317','2008-04-08','12'
-'395','2008-04-08','12'
-'336','2008-04-08','12'
-'229','2008-04-08','12'
-'233','2008-04-08','12'
-'472','2008-04-08','12'
-'322','2008-04-08','12'
-'498','2008-04-08','12'
-'321','2008-04-08','12'
-'430','2008-04-08','12'
-'489','2008-04-08','12'
-'458','2008-04-08','12'
-'223','2008-04-08','12'
-'492','2008-04-08','12'
-'449','2008-04-08','12'
-'218','2008-04-08','12'
-'228','2008-04-08','12'
-'453','2008-04-08','12'
-'209','2008-04-08','12'
-'468','2008-04-08','12'
-'342','2008-04-08','12'
-'230','2008-04-08','12'
-'368','2008-04-08','12'
-'296','2008-04-08','12'
-'216','2008-04-08','12'
-'367','2008-04-08','12'
-'344','2008-04-08','12'
-'274','2008-04-08','12'
-'219','2008-04-08','12'
-'239','2008-04-08','12'
-'485','2008-04-08','12'
-'223','2008-04-08','12'
-'256','2008-04-08','12'
-'263','2008-04-08','12'
-'487','2008-04-08','12'
-'480','2008-04-08','12'
-'401','2008-04-08','12'
-'288','2008-04-08','12'
-'244','2008-04-08','12'
-'438','2008-04-08','12'
-'467','2008-04-08','12'
-'432','2008-04-08','12'
-'202','2008-04-08','12'
-'316','2008-04-08','12'
-'229','2008-04-08','12'
-'469','2008-04-08','12'
-'463','2008-04-08','12'
-'280','2008-04-08','12'
-'283','2008-04-08','12'
-'331','2008-04-08','12'
-'235','2008-04-08','12'
-'321','2008-04-08','12'
-'335','2008-04-08','12'
-'466','2008-04-08','12'
-'366','2008-04-08','12'
-'403','2008-04-08','12'
-'483','2008-04-08','12'
-'257','2008-04-08','12'
-'406','2008-04-08','12'
-'409','2008-04-08','12'
-'406','2008-04-08','12'
-'401','2008-04-08','12'
-'258','2008-04-08','12'
-'203','2008-04-08','12'
-'262','2008-04-08','12'
-'348','2008-04-08','12'
-'424','2008-04-08','12'
-'396','2008-04-08','12'
-'201','2008-04-08','12'
-'217','2008-04-08','12'
-'431','2008-04-08','12'
-'454','2008-04-08','12'
-'478','2008-04-08','12'
-'298','2008-04-08','12'
-'431','2008-04-08','12'
-'424','2008-04-08','12'
-'382','2008-04-08','12'
-'397','2008-04-08','12'
-'480','2008-04-08','12'
-'291','2008-04-08','12'
-'351','2008-04-08','12'
-'255','2008-04-08','12'
-'438','2008-04-08','12'
-'414','2008-04-08','12'
-'200','2008-04-08','12'
-'491','2008-04-08','12'
-'237','2008-04-08','12'
-'439','2008-04-08','12'
-'360','2008-04-08','12'
-'248','2008-04-08','12'
-'479','2008-04-08','12'
-'305','2008-04-08','12'
-'417','2008-04-08','12'
-'444','2008-04-08','12'
-'429','2008-04-08','12'
-'443','2008-04-08','12'
-'323','2008-04-08','12'
-'325','2008-04-08','12'
-'277','2008-04-08','12'
-'230','2008-04-08','12'
-'478','2008-04-08','12'
-'468','2008-04-08','12'
-'310','2008-04-08','12'
-'317','2008-04-08','12'
-'333','2008-04-08','12'
-'493','2008-04-08','12'
-'460','2008-04-08','12'
-'207','2008-04-08','12'
-'249','2008-04-08','12'
-'265','2008-04-08','12'
-'480','2008-04-08','12'
-'353','2008-04-08','12'
-'214','2008-04-08','12'
-'462','2008-04-08','12'
-'233','2008-04-08','12'
-'406','2008-04-08','12'
-'454','2008-04-08','12'
-'375','2008-04-08','12'
-'401','2008-04-08','12'
-'421','2008-04-08','12'
-'407','2008-04-08','12'
-'384','2008-04-08','12'
-'256','2008-04-08','12'
-'384','2008-04-08','12'
-'379','2008-04-08','12'
-'462','2008-04-08','12'
-'492','2008-04-08','12'
-'298','2008-04-08','12'
-'341','2008-04-08','12'
-'498','2008-04-08','12'
-'458','2008-04-08','12'
-'362','2008-04-08','12'
-'285','2008-04-08','12'
-'348','2008-04-08','12'
-'273','2008-04-08','12'
-'281','2008-04-08','12'
-'344','2008-04-08','12'
-'469','2008-04-08','12'
-'315','2008-04-08','12'
-'448','2008-04-08','12'
-'348','2008-04-08','12'
-'307','2008-04-08','12'
-'414','2008-04-08','12'
-'477','2008-04-08','12'
-'222','2008-04-08','12'
-'403','2008-04-08','12'
-'400','2008-04-08','12'
-'200','2008-04-08','12'
-311 rows selected 
->>>  !record
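
The input12.q golden file above exercised Hive's multi-table insert: a single FROM clause fans out into three INSERT OVERWRITE branches, each with its own WHERE predicate, so src is scanned only once; the UDFToInteger calls in the plan coerce the string key into the INT destination columns, and the Conditional Operator stages (Stage-9, Stage-15, Stage-21) are the optional small-file merge paths Hive plans per destination. A minimal sketch of the same pattern, where a_low and b_part are illustrative table names, not part of the test:

    -- Multi-insert sketch; a_low and b_part are hypothetical tables.
    CREATE TABLE a_low (key INT, value STRING) STORED AS TEXTFILE;
    CREATE TABLE b_part (key INT) PARTITIONED BY (ds STRING) STORED AS TEXTFILE;

    FROM src                                     -- scanned exactly once
    INSERT OVERWRITE TABLE a_low
      SELECT src.key, src.value WHERE src.key < 100
    INSERT OVERWRITE TABLE b_part PARTITION (ds='2008-04-08')
      SELECT src.key WHERE src.key >= 100;       -- static partition target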

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/input13.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/input13.q.out b/ql/src/test/results/beelinepositive/input13.q.out
deleted file mode 100644
index 6a0266a..0000000
--- a/ql/src/test/results/beelinepositive/input13.q.out
+++ /dev/null
@@ -1,669 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/input13.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/input13.q
->>>  CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
-No rows affected 
->>>  CREATE TABLE dest2(key INT, value STRING) STORED AS TEXTFILE;
-No rows affected 
->>>  CREATE TABLE dest3(key INT) PARTITIONED BY(ds STRING, hr STRING) STORED AS TEXTFILE;
-No rows affected 
->>>  
->>>  EXPLAIN 
-FROM src 
-INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100 
-INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200 
-INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200 and src.key < 300 
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME src)))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest2))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL src) key) 100) (< (. (TOK_TABLE_OR_COL src) key) 200)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest3) (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr '12')))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL src) key) 200) (< (. (TOK_TABLE_OR_COL src) key) 300)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR '../build/ql/test/data/warehouse/dest4.out')) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (>= (. (TOK_TABLE_OR_COL src) key) 300))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-4 is a root stage'
-'  Stage-10 depends on stages: Stage-4 , consists of Stage-7, Stage-6, Stage-8'
-'  Stage-7'
-'  Stage-0 depends on stages: Stage-7, Stage-6, Stage-9'
-'  Stage-5 depends on stages: Stage-0'
-'  Stage-6'
-'  Stage-8'
-'  Stage-9 depends on stages: Stage-8'
-'  Stage-16 depends on stages: Stage-4 , consists of Stage-13, Stage-12, Stage-14'
-'  Stage-13'
-'  Stage-1 depends on stages: Stage-13, Stage-12, Stage-15'
-'  Stage-11 depends on stages: Stage-1'
-'  Stage-12'
-'  Stage-14'
-'  Stage-15 depends on stages: Stage-14'
-'  Stage-22 depends on stages: Stage-4 , consists of Stage-19, Stage-18, Stage-20'
-'  Stage-19'
-'  Stage-2 depends on stages: Stage-19, Stage-18, Stage-21'
-'  Stage-17 depends on stages: Stage-2'
-'  Stage-18'
-'  Stage-20'
-'  Stage-21 depends on stages: Stage-20'
-'  Stage-27 depends on stages: Stage-4 , consists of Stage-24, Stage-23, Stage-25'
-'  Stage-24'
-'  Stage-3 depends on stages: Stage-24, Stage-23, Stage-26'
-'  Stage-23'
-'  Stage-25'
-'  Stage-26 depends on stages: Stage-25'
-''
-'STAGE PLANS:'
-'  Stage: Stage-4'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (key < 100.0)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: key'
-'                      type: string'
-'                      expr: value'
-'                      type: string'
-'                outputColumnNames: _col0, _col1'
-'                Select Operator'
-'                  expressions:'
-'                        expr: UDFToInteger(_col0)'
-'                        type: int'
-'                        expr: _col1'
-'                        type: string'
-'                  outputColumnNames: _col0, _col1'
-'                  File Output Operator'
-'                    compressed: false'
-'                    GlobalTableId: 1'
-'                    table:'
-'                        input format: org.apache.hadoop.mapred.TextInputFormat'
-'                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                        name: input13.dest1'
-'            Filter Operator'
-'              predicate:'
-'                  expr: ((key >= 100.0) and (key < 200.0))'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: key'
-'                      type: string'
-'                      expr: value'
-'                      type: string'
-'                outputColumnNames: _col0, _col1'
-'                Select Operator'
-'                  expressions:'
-'                        expr: UDFToInteger(_col0)'
-'                        type: int'
-'                        expr: _col1'
-'                        type: string'
-'                  outputColumnNames: _col0, _col1'
-'                  File Output Operator'
-'                    compressed: false'
-'                    GlobalTableId: 2'
-'                    table:'
-'                        input format: org.apache.hadoop.mapred.TextInputFormat'
-'                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                        name: input13.dest2'
-'            Filter Operator'
-'              predicate:'
-'                  expr: ((key >= 200.0) and (key < 300.0))'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: key'
-'                      type: string'
-'                outputColumnNames: _col0'
-'                Select Operator'
-'                  expressions:'
-'                        expr: UDFToInteger(_col0)'
-'                        type: int'
-'                  outputColumnNames: _col0'
-'                  File Output Operator'
-'                    compressed: false'
-'                    GlobalTableId: 3'
-'                    table:'
-'                        input format: org.apache.hadoop.mapred.TextInputFormat'
-'                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                        name: input13.dest3'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (key >= 300.0)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: value'
-'                      type: string'
-'                outputColumnNames: _col0'
-'                File Output Operator'
-'                  compressed: false'
-'                  GlobalTableId: 4'
-'                  table:'
-'                      input format: org.apache.hadoop.mapred.TextInputFormat'
-'                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-10'
-'    Conditional Operator'
-''
-'  Stage: Stage-7'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: pfile:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-0'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: input13.dest1'
-''
-'  Stage: Stage-5'
-'    Stats-Aggr Operator'
-''
-'  Stage: Stage-6'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        pfile:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                  name: input13.dest1'
-''
-'  Stage: Stage-8'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        pfile:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                  name: input13.dest1'
-''
-'  Stage: Stage-9'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: pfile:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-16'
-'    Conditional Operator'
-''
-'  Stage: Stage-13'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: pfile:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-1'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: input13.dest2'
-''
-'  Stage: Stage-11'
-'    Stats-Aggr Operator'
-''
-'  Stage: Stage-12'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        pfile:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                  name: input13.dest2'
-''
-'  Stage: Stage-14'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        pfile:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                  name: input13.dest2'
-''
-'  Stage: Stage-15'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: pfile:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-22'
-'    Conditional Operator'
-''
-'  Stage: Stage-19'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: pfile:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-2'
-'    Move Operator'
-'      tables:'
-'          partition:'
-'            ds 2008-04-08'
-'            hr 12'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: input13.dest3'
-''
-'  Stage: Stage-17'
-'    Stats-Aggr Operator'
-''
-'  Stage: Stage-18'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        pfile:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                  name: input13.dest3'
-''
-'  Stage: Stage-20'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        pfile:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                  name: input13.dest3'
-''
-'  Stage: Stage-21'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: pfile:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-27'
-'    Conditional Operator'
-''
-'  Stage: Stage-24'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: file:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-3'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: ../build/ql/test/data/warehouse/dest4.out'
-''
-'  Stage: Stage-23'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        file:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-25'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        file:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-26'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: file:!!{hive.exec.scratchdir}!!'
-''
-''
-339 rows selected 
->>>  
->>>  FROM src 
-INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100 
-INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200 
-INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200 and src.key < 300 
-INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/dest4.out' SELECT src.value WHERE src.key >= 300;
-'value'
-No rows selected 
->>>  
->>>  SELECT dest1.* FROM dest1;
-'key','value'
-'86','val_86'
-'27','val_27'
-'98','val_98'
-'66','val_66'
-'37','val_37'
-'15','val_15'
-'82','val_82'
-'17','val_17'
-'0','val_0'
-'57','val_57'
-'20','val_20'
-'92','val_92'
-'47','val_47'
-'72','val_72'
-'4','val_4'
-'35','val_35'
-'54','val_54'
-'51','val_51'
-'65','val_65'
-'83','val_83'
-'12','val_12'
-'67','val_67'
-'84','val_84'
-'58','val_58'
-'8','val_8'
-'24','val_24'
-'42','val_42'
-'0','val_0'
-'96','val_96'
-'26','val_26'
-'51','val_51'
-'43','val_43'
-'95','val_95'
-'98','val_98'
-'85','val_85'
-'77','val_77'
-'0','val_0'
-'87','val_87'
-'15','val_15'
-'72','val_72'
-'90','val_90'
-'19','val_19'
-'10','val_10'
-'5','val_5'
-'58','val_58'
-'35','val_35'
-'95','val_95'
-'11','val_11'
-'34','val_34'
-'42','val_42'
-'78','val_78'
-'76','val_76'
-'41','val_41'
-'30','val_30'
-'64','val_64'
-'76','val_76'
-'74','val_74'
-'69','val_69'
-'33','val_33'
-'70','val_70'
-'5','val_5'
-'2','val_2'
-'35','val_35'
-'80','val_80'
-'44','val_44'
-'53','val_53'
-'90','val_90'
-'12','val_12'
-'5','val_5'
-'70','val_70'
-'24','val_24'
-'70','val_70'
-'83','val_83'
-'26','val_26'
-'67','val_67'
-'18','val_18'
-'9','val_9'
-'18','val_18'
-'97','val_97'
-'84','val_84'
-'28','val_28'
-'37','val_37'
-'90','val_90'
-'97','val_97'
-84 rows selected 
->>>  SELECT dest2.* FROM dest2;
-'key','value'
-'165','val_165'
-'193','val_193'
-'150','val_150'
-'128','val_128'
-'146','val_146'
-'152','val_152'
-'145','val_145'
-'166','val_166'
-'153','val_153'
-'193','val_193'
-'174','val_174'
-'199','val_199'
-'174','val_174'
-'162','val_162'
-'167','val_167'
-'195','val_195'
-'113','val_113'
-'155','val_155'
-'128','val_128'
-'149','val_149'
-'129','val_129'
-'170','val_170'
-'157','val_157'
-'111','val_111'
-'169','val_169'
-'125','val_125'
-'192','val_192'
-'187','val_187'
-'176','val_176'
-'138','val_138'
-'103','val_103'
-'176','val_176'
-'137','val_137'
-'180','val_180'
-'181','val_181'
-'138','val_138'
-'179','val_179'
-'172','val_172'
-'129','val_129'
-'158','val_158'
-'119','val_119'
-'197','val_197'
-'100','val_100'
-'199','val_199'
-'191','val_191'
-'165','val_165'
-'120','val_120'
-'131','val_131'
-'156','val_156'
-'196','val_196'
-'197','val_197'
-'187','val_187'
-'137','val_137'
-'169','val_169'
-'179','val_179'
-'118','val_118'
-'134','val_134'
-'138','val_138'
-'118','val_118'
-'177','val_177'
-'168','val_168'
-'143','val_143'
-'160','val_160'
-'195','val_195'
-'119','val_119'
-'149','val_149'
-'138','val_138'
-'103','val_103'
-'113','val_113'
-'167','val_167'
-'116','val_116'
-'191','val_191'
-'128','val_128'
-'193','val_193'
-'104','val_104'
-'175','val_175'
-'105','val_105'
-'190','val_190'
-'114','val_114'
-'164','val_164'
-'125','val_125'
-'164','val_164'
-'187','val_187'
-'104','val_104'
-'163','val_163'
-'119','val_119'
-'199','val_199'
-'120','val_120'
-'169','val_169'
-'178','val_178'
-'136','val_136'
-'172','val_172'
-'133','val_133'
-'175','val_175'
-'189','val_189'
-'134','val_134'
-'100','val_100'
-'146','val_146'
-'186','val_186'
-'167','val_167'
-'183','val_183'
-'152','val_152'
-'194','val_194'
-'126','val_126'
-'169','val_169'
-105 rows selected 
->>>  SELECT dest3.* FROM dest3;
-'key','ds','hr'
-'238','2008-04-08','12'
-'255','2008-04-08','12'
-'278','2008-04-08','12'
-'265','2008-04-08','12'
-'273','2008-04-08','12'
-'224','2008-04-08','12'
-'213','2008-04-08','12'
-'281','2008-04-08','12'
-'277','2008-04-08','12'
-'209','2008-04-08','12'
-'252','2008-04-08','12'
-'292','2008-04-08','12'
-'219','2008-04-08','12'
-'287','2008-04-08','12'
-'237','2008-04-08','12'
-'207','2008-04-08','12'
-'208','2008-04-08','12'
-'247','2008-04-08','12'
-'266','2008-04-08','12'
-'203','2008-04-08','12'
-'205','2008-04-08','12'
-'221','2008-04-08','12'
-'280','2008-04-08','12'
-'277','2008-04-08','12'
-'208','2008-04-08','12'
-'286','2008-04-08','12'
-'239','2008-04-08','12'
-'213','2008-04-08','12'
-'216','2008-04-08','12'
-'278','2008-04-08','12'
-'289','2008-04-08','12'
-'221','2008-04-08','12'
-'275','2008-04-08','12'
-'241','2008-04-08','12'
-'284','2008-04-08','12'
-'230','2008-04-08','12'
-'260','2008-04-08','12'
-'272','2008-04-08','12'
-'217','2008-04-08','12'
-'230','2008-04-08','12'
-'208','2008-04-08','12'
-'298','2008-04-08','12'
-'230','2008-04-08','12'
-'205','2008-04-08','12'
-'288','2008-04-08','12'
-'282','2008-04-08','12'
-'282','2008-04-08','12'
-'238','2008-04-08','12'
-'277','2008-04-08','12'
-'273','2008-04-08','12'
-'224','2008-04-08','12'
-'242','2008-04-08','12'
-'272','2008-04-08','12'
-'242','2008-04-08','12'
-'226','2008-04-08','12'
-'229','2008-04-08','12'
-'233','2008-04-08','12'
-'223','2008-04-08','12'
-'218','2008-04-08','12'
-'228','2008-04-08','12'
-'209','2008-04-08','12'
-'230','2008-04-08','12'
-'296','2008-04-08','12'
-'216','2008-04-08','12'
-'274','2008-04-08','12'
-'219','2008-04-08','12'
-'239','2008-04-08','12'
-'223','2008-04-08','12'
-'256','2008-04-08','12'
-'263','2008-04-08','12'
-'288','2008-04-08','12'
-'244','2008-04-08','12'
-'202','2008-04-08','12'
-'229','2008-04-08','12'
-'280','2008-04-08','12'
-'283','2008-04-08','12'
-'235','2008-04-08','12'
-'257','2008-04-08','12'
-'258','2008-04-08','12'
-'203','2008-04-08','12'
-'262','2008-04-08','12'
-'201','2008-04-08','12'
-'217','2008-04-08','12'
-'298','2008-04-08','12'
-'291','2008-04-08','12'
-'255','2008-04-08','12'
-'200','2008-04-08','12'
-'237','2008-04-08','12'
-'248','2008-04-08','12'
-'277','2008-04-08','12'
-'230','2008-04-08','12'
-'207','2008-04-08','12'
-'249','2008-04-08','12'
-'265','2008-04-08','12'
-'214','2008-04-08','12'
-'233','2008-04-08','12'
-'256','2008-04-08','12'
-'298','2008-04-08','12'
-'285','2008-04-08','12'
-'273','2008-04-08','12'
-'281','2008-04-08','12'
-'222','2008-04-08','12'
-'200','2008-04-08','12'
-103 rows selected 
->>>  dfs -cat ../build/ql/test/data/warehouse/dest4.out/*;
-No rows affected 
->>>  !record
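
input13.q added a fourth branch to the same multi-insert, INSERT OVERWRITE DIRECTORY, which is why the plan carries Stage-3 moving files to ../build/ql/test/data/warehouse/dest4.out and why the script verifies that branch with dfs -cat rather than a SELECT: a directory target receives plain files with no table or partition metadata. A hedged sketch of that branch, reusing the test's src and dest1 and an illustrative output path:

    -- Directory branch sketch; /tmp/dest4_demo is an illustrative path.
    FROM src
    INSERT OVERWRITE TABLE dest1
      SELECT src.key, src.value WHERE src.key < 300
    INSERT OVERWRITE DIRECTORY '/tmp/dest4_demo'
      SELECT src.value WHERE src.key >= 300;     -- plain text files, no metastore entry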

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/input14.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/input14.q.out b/ql/src/test/results/beelinepositive/input14.q.out
deleted file mode 100644
index 08a7dd4..0000000
--- a/ql/src/test/results/beelinepositive/input14.q.out
+++ /dev/null
@@ -1,198 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/input14.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/input14.q
->>>  CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
-No rows affected 
->>>  
->>>  EXPLAIN 
-FROM ( 
-FROM src 
-SELECT TRANSFORM(src.key, src.value) 
-USING 'cat' AS (tkey, tvalue) 
-CLUSTER BY tkey 
-) tmap 
-INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE TOK_RECORDWRITER 'cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) tkey) 100))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 depends on stages: Stage-1'
-'  Stage-2 depends on stages: Stage-0'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        tmap:src '
-'          TableScan'
-'            alias: src'
-'            Select Operator'
-'              expressions:'
-'                    expr: key'
-'                    type: string'
-'                    expr: value'
-'                    type: string'
-'              outputColumnNames: _col0, _col1'
-'              Transform Operator'
-'                command: cat'
-'                output info:'
-'                    input format: org.apache.hadoop.mapred.TextInputFormat'
-'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                Filter Operator'
-'                  predicate:'
-'                      expr: (_col0 < 100.0)'
-'                      type: boolean'
-'                  Reduce Output Operator'
-'                    key expressions:'
-'                          expr: _col0'
-'                          type: string'
-'                    sort order: +'
-'                    Map-reduce partition columns:'
-'                          expr: _col0'
-'                          type: string'
-'                    tag: -1'
-'                    value expressions:'
-'                          expr: _col0'
-'                          type: string'
-'                          expr: _col1'
-'                          type: string'
-'      Reduce Operator Tree:'
-'        Extract'
-'          Select Operator'
-'            expressions:'
-'                  expr: _col0'
-'                  type: string'
-'                  expr: _col1'
-'                  type: string'
-'            outputColumnNames: _col0, _col1'
-'            Select Operator'
-'              expressions:'
-'                    expr: UDFToInteger(_col0)'
-'                    type: int'
-'                    expr: _col1'
-'                    type: string'
-'              outputColumnNames: _col0, _col1'
-'              File Output Operator'
-'                compressed: false'
-'                GlobalTableId: 1'
-'                table:'
-'                    input format: org.apache.hadoop.mapred.TextInputFormat'
-'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                    name: input14.dest1'
-''
-'  Stage: Stage-0'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: input14.dest1'
-''
-'  Stage: Stage-2'
-'    Stats-Aggr Operator'
-''
-''
-84 rows selected 
->>>  
->>>  FROM ( 
-FROM src 
-SELECT TRANSFORM(src.key, src.value) 
-USING 'cat' AS (tkey, tvalue) 
-CLUSTER BY tkey 
-) tmap 
-INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100;
-'_col0','_col1'
-No rows selected 
->>>  
->>>  SELECT dest1.* FROM dest1;
-'key','value'
-'0','val_0'
-'0','val_0'
-'0','val_0'
-'10','val_10'
-'11','val_11'
-'12','val_12'
-'12','val_12'
-'15','val_15'
-'15','val_15'
-'17','val_17'
-'18','val_18'
-'18','val_18'
-'19','val_19'
-'2','val_2'
-'20','val_20'
-'24','val_24'
-'24','val_24'
-'26','val_26'
-'26','val_26'
-'27','val_27'
-'28','val_28'
-'30','val_30'
-'33','val_33'
-'34','val_34'
-'35','val_35'
-'35','val_35'
-'35','val_35'
-'37','val_37'
-'37','val_37'
-'4','val_4'
-'41','val_41'
-'42','val_42'
-'42','val_42'
-'43','val_43'
-'44','val_44'
-'47','val_47'
-'5','val_5'
-'5','val_5'
-'5','val_5'
-'51','val_51'
-'51','val_51'
-'53','val_53'
-'54','val_54'
-'57','val_57'
-'58','val_58'
-'58','val_58'
-'64','val_64'
-'65','val_65'
-'66','val_66'
-'67','val_67'
-'67','val_67'
-'69','val_69'
-'70','val_70'
-'70','val_70'
-'70','val_70'
-'72','val_72'
-'72','val_72'
-'74','val_74'
-'76','val_76'
-'76','val_76'
-'77','val_77'
-'78','val_78'
-'8','val_8'
-'80','val_80'
-'82','val_82'
-'83','val_83'
-'83','val_83'
-'84','val_84'
-'84','val_84'
-'85','val_85'
-'86','val_86'
-'87','val_87'
-'9','val_9'
-'90','val_90'
-'90','val_90'
-'90','val_90'
-'92','val_92'
-'95','val_95'
-'95','val_95'
-'96','val_96'
-'97','val_97'
-'97','val_97'
-'98','val_98'
-'98','val_98'
-84 rows selected 
->>>  !record
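
input14.q covered SELECT TRANSFORM: each input row is serialized to the stdin of an external command ('cat' here, so the transform is an identity), the command's stdout is parsed back into the declared columns as STRINGs, and CLUSTER BY tkey (shorthand for DISTRIBUTE BY plus SORT BY on the same column) accounts for the Reduce Output Operator and Extract pair in the plan. The same shape again as an annotated sketch:

    FROM (
      FROM src
      SELECT TRANSFORM(src.key, src.value)  -- rows piped to the command's stdin
      USING 'cat' AS (tkey, tvalue)         -- stdout parsed back; both columns STRING
      CLUSTER BY tkey                       -- = DISTRIBUTE BY tkey SORT BY tkey
    ) tmap
    INSERT OVERWRITE TABLE dest1
    SELECT tmap.tkey, tmap.tvalue
    WHERE tmap.tkey < 100;                  -- compared numerically, hence 100.0 in the plan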

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/input14_limit.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/input14_limit.q.out b/ql/src/test/results/beelinepositive/input14_limit.q.out
deleted file mode 100644
index d53bcc1..0000000
--- a/ql/src/test/results/beelinepositive/input14_limit.q.out
+++ /dev/null
@@ -1,149 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/input14_limit.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/input14_limit.q
->>>  CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
-No rows affected 
->>>  
->>>  EXPLAIN 
-FROM ( 
-FROM src 
-SELECT TRANSFORM(src.key, src.value) 
-USING 'cat' AS (tkey, tvalue) 
-CLUSTER BY tkey LIMIT 20 
-) tmap 
-INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE TOK_RECORDWRITER 'cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)) (TOK_LIMIT 20))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) tkey) 100))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-2 depends on stages: Stage-1'
-'  Stage-0 depends on stages: Stage-2'
-'  Stage-3 depends on stages: Stage-0'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        tmap:src '
-'          TableScan'
-'            alias: src'
-'            Select Operator'
-'              expressions:'
-'                    expr: key'
-'                    type: string'
-'                    expr: value'
-'                    type: string'
-'              outputColumnNames: _col0, _col1'
-'              Transform Operator'
-'                command: cat'
-'                output info:'
-'                    input format: org.apache.hadoop.mapred.TextInputFormat'
-'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                Reduce Output Operator'
-'                  key expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                  sort order: +'
-'                  Map-reduce partition columns:'
-'                        expr: _col0'
-'                        type: string'
-'                  tag: -1'
-'                  value expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                        expr: _col1'
-'                        type: string'
-'      Reduce Operator Tree:'
-'        Extract'
-'          Limit'
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'
-''
-'  Stage: Stage-2'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        file:!!{hive.exec.scratchdir}!! '
-'            Reduce Output Operator'
-'              key expressions:'
-'                    expr: _col0'
-'                    type: string'
-'              sort order: +'
-'              Map-reduce partition columns:'
-'                    expr: _col0'
-'                    type: string'
-'              tag: -1'
-'              value expressions:'
-'                    expr: _col0'
-'                    type: string'
-'                    expr: _col1'
-'                    type: string'
-'      Reduce Operator Tree:'
-'        Extract'
-'          Limit'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (_col0 < 100.0)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: _col0'
-'                      type: string'
-'                      expr: _col1'
-'                      type: string'
-'                outputColumnNames: _col0, _col1'
-'                Select Operator'
-'                  expressions:'
-'                        expr: UDFToInteger(_col0)'
-'                        type: int'
-'                        expr: _col1'
-'                        type: string'
-'                  outputColumnNames: _col0, _col1'
-'                  File Output Operator'
-'                    compressed: false'
-'                    GlobalTableId: 1'
-'                    table:'
-'                        input format: org.apache.hadoop.mapred.TextInputFormat'
-'                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                        name: input14_limit.dest1'
-''
-'  Stage: Stage-0'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: input14_limit.dest1'
-''
-'  Stage: Stage-3'
-'    Stats-Aggr Operator'
-''
-''
-114 rows selected 
->>>  
->>>  FROM ( 
-FROM src 
-SELECT TRANSFORM(src.key, src.value) 
-USING 'cat' AS (tkey, tvalue) 
-CLUSTER BY tkey LIMIT 20 
-) tmap 
-INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100;
-'_col0','_col1'
-No rows selected 
->>>  
->>>  SELECT dest1.* FROM dest1;
-'key','value'
-'0','val_0'
-'0','val_0'
-'0','val_0'
-'10','val_10'
-'11','val_11'
-5 rows selected 
->>>  !record
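
The file above exercised Hive's script-transform pipeline with a LIMIT: rows stream through an external command ('cat', an identity transform), get re-clustered on the transformed key, are truncated in a dedicated reduce stage, and only then filtered into the destination — which is why the plan shows the Limit above the Filter. A minimal sketch of the same pattern; tmap_dest is a hypothetical stand-in for the test's own dest1 table:

    -- tmap_dest is illustrative; any (INT, STRING) destination works
    CREATE TABLE tmap_dest(key INT, value STRING) STORED AS TEXTFILE;

    FROM (
      FROM src
      SELECT TRANSFORM(src.key, src.value)  -- stream (key, value) through a script
      USING 'cat' AS (tkey, tvalue)         -- 'cat' passes rows through unchanged
      CLUSTER BY tkey                       -- shuffle and sort on the transformed key
      LIMIT 20                              -- enforced in its own reduce stage
    ) tmap
    INSERT OVERWRITE TABLE tmap_dest
    SELECT tmap.tkey, tmap.tvalue
    WHERE tmap.tkey < 100;  -- script output is string-typed, so the plan
                            -- compares against 100.0 as a double
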

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/input15.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/input15.q.out b/ql/src/test/results/beelinepositive/input15.q.out
deleted file mode 100644
index 656e6e1..0000000
--- a/ql/src/test/results/beelinepositive/input15.q.out
+++ /dev/null
@@ -1,37 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/input15.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/input15.q
->>>  EXPLAIN 
-CREATE TABLE TEST15(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_CREATETABLE (TOK_TABNAME TEST15) TOK_LIKETABLE (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) (TOK_TABLEROWFORMAT (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\t'))) TOK_TBLTEXTFILE)'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-0'
-'      Create Table Operator:'
-'        Create Table'
-'          columns: key int, value string'
-'          field delimiter: 	'
-'          if not exists: false'
-'          input format: org.apache.hadoop.mapred.TextInputFormat'
-'          # buckets: -1'
-'          output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'
-'          name: TEST15'
-'          isExternal: false'
-''
-''
-20 rows selected 
->>>  
->>>  CREATE TABLE TEST15(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE;
-No rows affected 
->>>  
->>>  DESCRIBE TEST15;
-'col_name','data_type','comment'
-'key','int',''
-'value','string',''
-2 rows selected 
->>>  
->>>  !record
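
The plan above is DDL-only: a single root stage holding a Create Table Operator, with the tab delimiter and the text input/output formats recorded as table properties rather than anything executed on the cluster. The same statement, runnable as-is:

    CREATE TABLE TEST15(key INT, value STRING)
    ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'  -- recorded as the field delimiter
    STORED AS TEXTFILE;                             -- TextInputFormat / IgnoreKeyTextOutputFormat

    DESCRIBE TEST15;  -- expect: key int, value string
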

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/input17.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/input17.q.out b/ql/src/test/results/beelinepositive/input17.q.out
deleted file mode 100644
index 96aa07c..0000000
--- a/ql/src/test/results/beelinepositive/input17.q.out
+++ /dev/null
@@ -1,121 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/input17.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/input17.q
->>>  CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
-No rows affected 
->>>  
->>>  EXPLAIN 
-FROM ( 
-FROM src_thrift 
-SELECT TRANSFORM(src_thrift.aint + src_thrift.lint[0], src_thrift.lintstring[0]) 
-USING 'cat' AS (tkey, tvalue) 
-CLUSTER BY tkey 
-) tmap 
-INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src_thrift))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (+ (. (TOK_TABLE_OR_COL src_thrift) aint) ([ (. (TOK_TABLE_OR_COL src_thrift) lint) 0)) ([ (. (TOK_TABLE_OR_COL src_thrift) lintstring) 0)) TOK_SERDE TOK_RECORDWRITER 'cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue)))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 depends on stages: Stage-1'
-'  Stage-2 depends on stages: Stage-0'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        tmap:src_thrift '
-'          TableScan'
-'            alias: src_thrift'
-'            Select Operator'
-'              expressions:'
-'                    expr: (aint + lint[0])'
-'                    type: int'
-'                    expr: lintstring[0]'
-'                    type: struct<myint:int,mystring:string,underscore_int:int>'
-'              outputColumnNames: _col0, _col1'
-'              Transform Operator'
-'                command: cat'
-'                output info:'
-'                    input format: org.apache.hadoop.mapred.TextInputFormat'
-'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                Reduce Output Operator'
-'                  key expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                  sort order: +'
-'                  Map-reduce partition columns:'
-'                        expr: _col0'
-'                        type: string'
-'                  tag: -1'
-'                  value expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                        expr: _col1'
-'                        type: string'
-'      Reduce Operator Tree:'
-'        Extract'
-'          Select Operator'
-'            expressions:'
-'                  expr: _col0'
-'                  type: string'
-'                  expr: _col1'
-'                  type: string'
-'            outputColumnNames: _col0, _col1'
-'            Select Operator'
-'              expressions:'
-'                    expr: UDFToInteger(_col0)'
-'                    type: int'
-'                    expr: _col1'
-'                    type: string'
-'              outputColumnNames: _col0, _col1'
-'              File Output Operator'
-'                compressed: false'
-'                GlobalTableId: 1'
-'                table:'
-'                    input format: org.apache.hadoop.mapred.TextInputFormat'
-'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                    name: input17.dest1'
-''
-'  Stage: Stage-0'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: input17.dest1'
-''
-'  Stage: Stage-2'
-'    Stats-Aggr Operator'
-''
-''
-80 rows selected 
->>>  
->>>  FROM ( 
-FROM src_thrift 
-SELECT TRANSFORM(src_thrift.aint + src_thrift.lint[0], src_thrift.lintstring[0]) 
-USING 'cat' AS (tkey, tvalue) 
-CLUSTER BY tkey 
-) tmap 
-INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue;
-'_col0','_col1'
-No rows selected 
->>>  
->>>  SELECT dest1.* FROM dest1;
-'key','value'
-'','null'
-'-1461153966','{"myint":49,"mystring":"343","underscore_int":7}'
-'-1952710705','{"myint":25,"mystring":"125","underscore_int":5}'
-'-734328905','{"myint":16,"mystring":"64","underscore_int":4}'
-'-751827636','{"myint":4,"mystring":"8","underscore_int":2}'
-'1244525196','{"myint":36,"mystring":"216","underscore_int":6}'
-'1638581586','{"myint":64,"mystring":"512","underscore_int":8}'
-'1712634731','{"myint":0,"mystring":"0","underscore_int":0}'
-'336964422','{"myint":81,"mystring":"729","underscore_int":9}'
-'465985201','{"myint":1,"mystring":"1","underscore_int":1}'
-'477111225','{"myint":9,"mystring":"27","underscore_int":3}'
-11 rows selected 
->>>  !record
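
input17 pushed complex thrift types through a script: an int expression over an array element becomes the key, and a struct column is serialized to JSON-style text on its way through 'cat' — which is why dest1's value column holds strings like '{"myint":49,...}'. The shape of the query, with the column types the plan above reports:

    -- src_thrift columns used here (per the plan):
    --   aint       INT
    --   lint       ARRAY<INT>
    --   lintstring ARRAY<STRUCT<myint:INT, mystring:STRING, underscore_int:INT>>
    FROM (
      FROM src_thrift
      SELECT TRANSFORM(src_thrift.aint + src_thrift.lint[0],  -- int arithmetic on an array element
                       src_thrift.lintstring[0])              -- a struct, serialized to text
      USING 'cat' AS (tkey, tvalue)
      CLUSTER BY tkey
    ) tmap
    INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue;
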

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/input18.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/input18.q.out b/ql/src/test/results/beelinepositive/input18.q.out
deleted file mode 100644
index 80106ae..0000000
--- a/ql/src/test/results/beelinepositive/input18.q.out
+++ /dev/null
@@ -1,202 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/input18.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/input18.q
->>>  CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
-No rows affected 
->>>  
->>>  EXPLAIN 
-FROM ( 
-FROM src 
-SELECT TRANSFORM(src.key, src.value, 1+2, 3+4) 
-USING 'cat' 
-CLUSTER BY key 
-) tmap 
-INSERT OVERWRITE TABLE dest1 SELECT tmap.key, regexp_replace(tmap.value,'\t','+') WHERE tmap.key < 100;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value) (+ 1 2) (+ 3 4)) TOK_SERDE TOK_RECORDWRITER 'cat' TOK_SERDE TOK_RECORDREADER))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL key)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) key)) (TOK_SELEXPR (TOK_FUNCTION regexp_replace (. (TOK_TABLE_OR_COL tmap) value) '\t' '+'))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) key) 100))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 depends on stages: Stage-1'
-'  Stage-2 depends on stages: Stage-0'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        tmap:src '
-'          TableScan'
-'            alias: src'
-'            Select Operator'
-'              expressions:'
-'                    expr: key'
-'                    type: string'
-'                    expr: value'
-'                    type: string'
-'                    expr: (1 + 2)'
-'                    type: int'
-'                    expr: (3 + 4)'
-'                    type: int'
-'              outputColumnNames: _col0, _col1, _col2, _col3'
-'              Transform Operator'
-'                command: cat'
-'                output info:'
-'                    input format: org.apache.hadoop.mapred.TextInputFormat'
-'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                Filter Operator'
-'                  predicate:'
-'                      expr: (_col0 < 100.0)'
-'                      type: boolean'
-'                  Reduce Output Operator'
-'                    key expressions:'
-'                          expr: _col0'
-'                          type: string'
-'                    sort order: +'
-'                    Map-reduce partition columns:'
-'                          expr: _col0'
-'                          type: string'
-'                    tag: -1'
-'                    value expressions:'
-'                          expr: _col0'
-'                          type: string'
-'                          expr: _col1'
-'                          type: string'
-'      Reduce Operator Tree:'
-'        Extract'
-'          Select Operator'
-'            expressions:'
-'                  expr: _col0'
-'                  type: string'
-'                  expr: regexp_replace(_col1, '	', '+')'
-'                  type: string'
-'            outputColumnNames: _col0, _col1'
-'            Select Operator'
-'              expressions:'
-'                    expr: UDFToInteger(_col0)'
-'                    type: int'
-'                    expr: _col1'
-'                    type: string'
-'              outputColumnNames: _col0, _col1'
-'              File Output Operator'
-'                compressed: false'
-'                GlobalTableId: 1'
-'                table:'
-'                    input format: org.apache.hadoop.mapred.TextInputFormat'
-'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                    name: input18.dest1'
-''
-'  Stage: Stage-0'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: input18.dest1'
-''
-'  Stage: Stage-2'
-'    Stats-Aggr Operator'
-''
-''
-88 rows selected 
->>>  
->>>  FROM ( 
-FROM src 
-SELECT TRANSFORM(src.key, src.value, 1+2, 3+4) 
-USING 'cat' 
-CLUSTER BY key 
-) tmap 
-INSERT OVERWRITE TABLE dest1 SELECT tmap.key, regexp_replace(tmap.value,'\t','+') WHERE tmap.key < 100;
-'_col0','_col1'
-No rows selected 
->>>  
->>>  SELECT dest1.* FROM dest1;
-'key','value'
-'0','val_0+3+7'
-'0','val_0+3+7'
-'0','val_0+3+7'
-'10','val_10+3+7'
-'11','val_11+3+7'
-'12','val_12+3+7'
-'12','val_12+3+7'
-'15','val_15+3+7'
-'15','val_15+3+7'
-'17','val_17+3+7'
-'18','val_18+3+7'
-'18','val_18+3+7'
-'19','val_19+3+7'
-'2','val_2+3+7'
-'20','val_20+3+7'
-'24','val_24+3+7'
-'24','val_24+3+7'
-'26','val_26+3+7'
-'26','val_26+3+7'
-'27','val_27+3+7'
-'28','val_28+3+7'
-'30','val_30+3+7'
-'33','val_33+3+7'
-'34','val_34+3+7'
-'35','val_35+3+7'
-'35','val_35+3+7'
-'35','val_35+3+7'
-'37','val_37+3+7'
-'37','val_37+3+7'
-'4','val_4+3+7'
-'41','val_41+3+7'
-'42','val_42+3+7'
-'42','val_42+3+7'
-'43','val_43+3+7'
-'44','val_44+3+7'
-'47','val_47+3+7'
-'5','val_5+3+7'
-'5','val_5+3+7'
-'5','val_5+3+7'
-'51','val_51+3+7'
-'51','val_51+3+7'
-'53','val_53+3+7'
-'54','val_54+3+7'
-'57','val_57+3+7'
-'58','val_58+3+7'
-'58','val_58+3+7'
-'64','val_64+3+7'
-'65','val_65+3+7'
-'66','val_66+3+7'
-'67','val_67+3+7'
-'67','val_67+3+7'
-'69','val_69+3+7'
-'70','val_70+3+7'
-'70','val_70+3+7'
-'70','val_70+3+7'
-'72','val_72+3+7'
-'72','val_72+3+7'
-'74','val_74+3+7'
-'76','val_76+3+7'
-'76','val_76+3+7'
-'77','val_77+3+7'
-'78','val_78+3+7'
-'8','val_8+3+7'
-'80','val_80+3+7'
-'82','val_82+3+7'
-'83','val_83+3+7'
-'83','val_83+3+7'
-'84','val_84+3+7'
-'84','val_84+3+7'
-'85','val_85+3+7'
-'86','val_86+3+7'
-'87','val_87+3+7'
-'9','val_9+3+7'
-'90','val_90+3+7'
-'90','val_90+3+7'
-'90','val_90+3+7'
-'92','val_92+3+7'
-'95','val_95+3+7'
-'95','val_95+3+7'
-'96','val_96+3+7'
-'97','val_97+3+7'
-'97','val_97+3+7'
-'98','val_98+3+7'
-'98','val_98+3+7'
-84 rows selected 
->>>  !record
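
input18 demonstrates TRANSFORM without an AS clause: the script's output is split on the first tab only, so src.value, 1+2, and 3+4 all land tab-joined in a single value column, and the final SELECT rewrites those tabs to '+' — hence rows like 'val_0+3+7'. Condensed:

    FROM (
      FROM src
      SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)  -- four columns in...
      USING 'cat'                                     -- ...but no AS list: output is split
      CLUSTER BY key                                  -- on the first tab only
    ) tmap
    INSERT OVERWRITE TABLE dest1
    SELECT tmap.key,
           regexp_replace(tmap.value, '\t', '+')      -- 'val_0<TAB>3<TAB>7' -> 'val_0+3+7'
    WHERE tmap.key < 100;
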

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/input19.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/input19.q.out b/ql/src/test/results/beelinepositive/input19.q.out
deleted file mode 100644
index c1c4514..0000000
--- a/ql/src/test/results/beelinepositive/input19.q.out
+++ /dev/null
@@ -1,13 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/input19.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/input19.q
->>>  
->>>  create table apachelog(ipaddress STRING,identd STRING,user_name STRING,finishtime STRING,requestline string,returncode INT,size INT) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe' WITH SERDEPROPERTIES (  'serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol',  'quote.delim'= '("|\\[|\\])',  'field.delim'=' ',  'serialization.null.format'='-'  ) STORED AS TEXTFILE;
-No rows affected 
->>>  LOAD DATA LOCAL INPATH '../data/files/apache.access.log' INTO TABLE apachelog;
-No rows affected 
->>>  SELECT a.* FROM apachelog a;
-'ipaddress','identd','user_name','finishtime','requestline','returncode','size'
-'127.0.0.1','','frank','10/Oct/2000:13:55:36 -0700','GET /apache_pb.gif HTTP/1.0','200','2326'
-1 row selected 
->>>  
->>>  !record
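
The apachelog table above parses an Apache access log with DynamicSerDe over TCTLSeparatedProtocol: field.delim splits records on spaces, quote.delim keeps quoted and bracketed fields (the request line, the timestamp) together, and serialization.null.format appears to be what turns the literal '-' in the identd position into the empty value seen in the result row. The same DDL, reformatted for readability:

    CREATE TABLE apachelog(
      ipaddress STRING, identd STRING, user_name STRING,
      finishtime STRING, requestline STRING,
      returncode INT, size INT)
    ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe'
    WITH SERDEPROPERTIES (
      'serialization.format' = 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol',
      'quote.delim' = '("|\\[|\\])',    -- treat "..." and [...] runs as single fields
      'field.delim' = ' ',              -- otherwise split on spaces
      'serialization.null.format' = '-' -- '-' in the log reads back as NULL
    ) STORED AS TEXTFILE;
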

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/input1_limit.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/input1_limit.q.out b/ql/src/test/results/beelinepositive/input1_limit.q.out
deleted file mode 100644
index 6add71b..0000000
--- a/ql/src/test/results/beelinepositive/input1_limit.q.out
+++ /dev/null
@@ -1,179 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/input1_limit.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/input1_limit.q
->>>  CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
-No rows affected 
->>>  CREATE TABLE dest2(key INT, value STRING) STORED AS TEXTFILE;
-No rows affected 
->>>  
->>>  EXPLAIN 
-FROM src 
-INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100 LIMIT 10 
-INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key < 100 LIMIT 5;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100)) (TOK_LIMIT 10)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest2))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100)) (TOK_LIMIT 5)))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-2 is a root stage'
-'  Stage-0 depends on stages: Stage-2'
-'  Stage-3 depends on stages: Stage-0'
-'  Stage-4 depends on stages: Stage-2'
-'  Stage-1 depends on stages: Stage-4'
-'  Stage-5 depends on stages: Stage-1'
-''
-'STAGE PLANS:'
-'  Stage: Stage-2'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (key < 100.0)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: key'
-'                      type: string'
-'                      expr: value'
-'                      type: string'
-'                outputColumnNames: _col0, _col1'
-'                Limit'
-'                  Reduce Output Operator'
-'                    sort order: '
-'                    tag: -1'
-'                    value expressions:'
-'                          expr: _col0'
-'                          type: string'
-'                          expr: _col1'
-'                          type: string'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (key < 100.0)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: key'
-'                      type: string'
-'                      expr: value'
-'                      type: string'
-'                outputColumnNames: _col0, _col1'
-'                Limit'
-'                  File Output Operator'
-'                    compressed: false'
-'                    GlobalTableId: 0'
-'                    table:'
-'                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat'
-'                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'
-'      Reduce Operator Tree:'
-'        Extract'
-'          Limit'
-'            Select Operator'
-'              expressions:'
-'                    expr: UDFToInteger(_col0)'
-'                    type: int'
-'                    expr: _col1'
-'                    type: string'
-'              outputColumnNames: _col0, _col1'
-'              File Output Operator'
-'                compressed: false'
-'                GlobalTableId: 1'
-'                table:'
-'                    input format: org.apache.hadoop.mapred.TextInputFormat'
-'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                    name: input1_limit.dest1'
-''
-'  Stage: Stage-0'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: input1_limit.dest1'
-''
-'  Stage: Stage-3'
-'    Stats-Aggr Operator'
-''
-'  Stage: Stage-4'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        file:!!{hive.exec.scratchdir}!! '
-'            Reduce Output Operator'
-'              sort order: '
-'              tag: -1'
-'              value expressions:'
-'                    expr: _col0'
-'                    type: string'
-'                    expr: _col1'
-'                    type: string'
-'      Reduce Operator Tree:'
-'        Extract'
-'          Limit'
-'            Select Operator'
-'              expressions:'
-'                    expr: UDFToInteger(_col0)'
-'                    type: int'
-'                    expr: _col1'
-'                    type: string'
-'              outputColumnNames: _col0, _col1'
-'              File Output Operator'
-'                compressed: false'
-'                GlobalTableId: 2'
-'                table:'
-'                    input format: org.apache.hadoop.mapred.TextInputFormat'
-'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                    name: input1_limit.dest2'
-''
-'  Stage: Stage-1'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: input1_limit.dest2'
-''
-'  Stage: Stage-5'
-'    Stats-Aggr Operator'
-''
-''
-133 rows selected 
->>>  
->>>  FROM src 
-INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100 LIMIT 10 
-INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key < 100 LIMIT 5;
-'_col0','_col1'
-No rows selected 
->>>  
->>>  SELECT dest1.* FROM dest1 ORDER BY dest1.key ASC, dest1.value ASC;
-'key','value'
-'0','val_0'
-'15','val_15'
-'17','val_17'
-'27','val_27'
-'37','val_37'
-'57','val_57'
-'66','val_66'
-'82','val_82'
-'86','val_86'
-'98','val_98'
-10 rows selected 
->>>  SELECT dest2.* FROM dest2 ORDER BY dest2.key ASC, dest2.value ASC;
-'key','value'
-'27','val_27'
-'37','val_37'
-'66','val_66'
-'86','val_86'
-'98','val_98'
-5 rows selected 
->>>  
->>>  
->>>  
->>>  
->>>  !record
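
input1_limit is the multi-insert counterpart: one scan of src feeds both branches, and each LIMIT is enforced in its own reduce stage (Stage-2 for dest1, Stage-4 for dest2 in the plan above). Because a LIMIT without an ORDER BY keeps whichever qualifying rows arrive first, the test reads the results back sorted to keep the golden output stable:

    FROM src
    INSERT OVERWRITE TABLE dest1
      SELECT src.key, src.value WHERE src.key < 100 LIMIT 10
    INSERT OVERWRITE TABLE dest2
      SELECT src.key, src.value WHERE src.key < 100 LIMIT 5;

    -- deterministic readback of nondeterministic LIMIT picks
    SELECT dest1.* FROM dest1 ORDER BY dest1.key ASC, dest1.value ASC;
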

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/input2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/input2.q.out b/ql/src/test/results/beelinepositive/input2.q.out
deleted file mode 100644
index 18309e8..0000000
--- a/ql/src/test/results/beelinepositive/input2.q.out
+++ /dev/null
@@ -1,77 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/input2.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/input2.q
->>>  CREATE TABLE TEST2a(A INT, B DOUBLE) STORED AS TEXTFILE;
-No rows affected 
->>>  DESCRIBE TEST2a;
-'col_name','data_type','comment'
-'a','int',''
-'b','double',''
-2 rows selected 
->>>  DESC TEST2a;
-'col_name','data_type','comment'
-'a','int',''
-'b','double',''
-2 rows selected 
->>>  CREATE TABLE TEST2b(A ARRAY<INT>, B DOUBLE, C MAP<DOUBLE, INT>) STORED AS TEXTFILE;
-No rows affected 
->>>  DESCRIBE TEST2b;
-'col_name','data_type','comment'
-'a','array<int>',''
-'b','double',''
-'c','map<double,int>',''
-3 rows selected 
->>>  SHOW TABLES;
-'tab_name'
-'primitives'
-'src'
-'src1'
-'src_json'
-'src_sequencefile'
-'src_thrift'
-'srcbucket'
-'srcbucket2'
-'srcpart'
-'test2a'
-'test2b'
-11 rows selected 
->>>  DROP TABLE TEST2a;
-No rows affected 
->>>  SHOW TABLES;
-'tab_name'
-'primitives'
-'src'
-'src1'
-'src_json'
-'src_sequencefile'
-'src_thrift'
-'srcbucket'
-'srcbucket2'
-'srcpart'
-'test2b'
-10 rows selected 
->>>  DROP TABLE TEST2b;
-No rows affected 
->>>  
->>>  EXPLAIN 
-SHOW TABLES;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  TOK_SHOWTABLES'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-0 is a root stage'
-'  Stage-1 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-0'
-'      Show Table Operator:'
-'        Show Tables'
-'          database name: input2'
-''
-'  Stage: Stage-1'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-18 rows selected 
->>>  !record
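
input2 is pure metadata: DDL over primitive and complex column types, DESCRIBE/DESC, and an EXPLAIN showing that even SHOW TABLES gets a two-stage plan — a Show Table Operator scoped to the test database plus a Fetch Operator with no row limit. The complex-type round trip in brief:

    CREATE TABLE TEST2b(A ARRAY<INT>, B DOUBLE, C MAP<DOUBLE, INT>) STORED AS TEXTFILE;
    DESCRIBE TEST2b;     -- types print lower-cased: array<int>, double, map<double,int>
    EXPLAIN SHOW TABLES; -- metadata-only: Show Table Operator + Fetch Operator
    DROP TABLE TEST2b;
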