Posted to commits@hive.apache.org by gu...@apache.org on 2017/02/03 21:50:24 UTC

[11/51] [partial] hive git commit: HIVE-15790: Remove unused beeline golden files (Gunther Hagleitner, reviewed by Sergey Shelukhin)

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/having.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/having.q.out b/ql/src/test/results/beelinepositive/having.q.out
deleted file mode 100644
index ab2365d..0000000
--- a/ql/src/test/results/beelinepositive/having.q.out
+++ /dev/null
@@ -1,1251 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/having.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/having.q
->>>  EXPLAIN SELECT count(value) AS c FROM src GROUP BY key HAVING c > 3;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL value)) c)) (TOK_GROUPBY (TOK_TABLE_OR_COL key)) (TOK_HAVING (> (TOK_TABLE_OR_COL c) 3))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            Select Operator'
-'              expressions:'
-'                    expr: key'
-'                    type: string'
-'                    expr: value'
-'                    type: string'
-'              outputColumnNames: key, value'
-'              Group By Operator'
-'                aggregations:'
-'                      expr: count(value)'
-'                bucketGroup: false'
-'                keys:'
-'                      expr: key'
-'                      type: string'
-'                mode: hash'
-'                outputColumnNames: _col0, _col1'
-'                Reduce Output Operator'
-'                  key expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                  sort order: +'
-'                  Map-reduce partition columns:'
-'                        expr: _col0'
-'                        type: string'
-'                  tag: -1'
-'                  value expressions:'
-'                        expr: _col1'
-'                        type: bigint'
-'      Reduce Operator Tree:'
-'        Group By Operator'
-'          aggregations:'
-'                expr: count(VALUE._col0)'
-'          bucketGroup: false'
-'          keys:'
-'                expr: KEY._col0'
-'                type: string'
-'          mode: mergepartial'
-'          outputColumnNames: _col0, _col1'
-'          Filter Operator'
-'            predicate:'
-'                expr: (_col1 > 3)'
-'                type: boolean'
-'            Select Operator'
-'              expressions:'
-'                    expr: _col1'
-'                    type: bigint'
-'              outputColumnNames: _col0'
-'              File Output Operator'
-'                compressed: false'
-'                GlobalTableId: 0'
-'                table:'
-'                    input format: org.apache.hadoop.mapred.TextInputFormat'
-'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-73 rows selected 
->>>  SELECT count(value) AS c FROM src GROUP BY key HAVING c > 3;
-'c'
-'4'
-'4'
-'5'
-'4'
-'5'
-'5'
-'4'
-'4'
-'5'
-'4'
-10 rows selected 
->>>  
->>>  EXPLAIN SELECT key, max(value) AS c FROM src GROUP BY key HAVING key != 302;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION max (TOK_TABLE_OR_COL value)) c)) (TOK_GROUPBY (TOK_TABLE_OR_COL key)) (TOK_HAVING (!= (TOK_TABLE_OR_COL key) 302))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (key <> 302.0)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: key'
-'                      type: string'
-'                      expr: value'
-'                      type: string'
-'                outputColumnNames: key, value'
-'                Group By Operator'
-'                  aggregations:'
-'                        expr: max(value)'
-'                  bucketGroup: false'
-'                  keys:'
-'                        expr: key'
-'                        type: string'
-'                  mode: hash'
-'                  outputColumnNames: _col0, _col1'
-'                  Reduce Output Operator'
-'                    key expressions:'
-'                          expr: _col0'
-'                          type: string'
-'                    sort order: +'
-'                    Map-reduce partition columns:'
-'                          expr: _col0'
-'                          type: string'
-'                    tag: -1'
-'                    value expressions:'
-'                          expr: _col1'
-'                          type: string'
-'      Reduce Operator Tree:'
-'        Group By Operator'
-'          aggregations:'
-'                expr: max(VALUE._col0)'
-'          bucketGroup: false'
-'          keys:'
-'                expr: KEY._col0'
-'                type: string'
-'          mode: mergepartial'
-'          outputColumnNames: _col0, _col1'
-'          Select Operator'
-'            expressions:'
-'                  expr: _col0'
-'                  type: string'
-'                  expr: _col1'
-'                  type: string'
-'            outputColumnNames: _col0, _col1'
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-75 rows selected 
->>>  SELECT key, max(value) AS c FROM src GROUP BY key HAVING key != 302;
-'key','c'
-'0','val_0'
-'10','val_10'
-'100','val_100'
-'103','val_103'
-'104','val_104'
-'105','val_105'
-'11','val_11'
-'111','val_111'
-'113','val_113'
-'114','val_114'
-'116','val_116'
-'118','val_118'
-'119','val_119'
-'12','val_12'
-'120','val_120'
-'125','val_125'
-'126','val_126'
-'128','val_128'
-'129','val_129'
-'131','val_131'
-'133','val_133'
-'134','val_134'
-'136','val_136'
-'137','val_137'
-'138','val_138'
-'143','val_143'
-'145','val_145'
-'146','val_146'
-'149','val_149'
-'15','val_15'
-'150','val_150'
-'152','val_152'
-'153','val_153'
-'155','val_155'
-'156','val_156'
-'157','val_157'
-'158','val_158'
-'160','val_160'
-'162','val_162'
-'163','val_163'
-'164','val_164'
-'165','val_165'
-'166','val_166'
-'167','val_167'
-'168','val_168'
-'169','val_169'
-'17','val_17'
-'170','val_170'
-'172','val_172'
-'174','val_174'
-'175','val_175'
-'176','val_176'
-'177','val_177'
-'178','val_178'
-'179','val_179'
-'18','val_18'
-'180','val_180'
-'181','val_181'
-'183','val_183'
-'186','val_186'
-'187','val_187'
-'189','val_189'
-'19','val_19'
-'190','val_190'
-'191','val_191'
-'192','val_192'
-'193','val_193'
-'194','val_194'
-'195','val_195'
-'196','val_196'
-'197','val_197'
-'199','val_199'
-'2','val_2'
-'20','val_20'
-'200','val_200'
-'201','val_201'
-'202','val_202'
-'203','val_203'
-'205','val_205'
-'207','val_207'
-'208','val_208'
-'209','val_209'
-'213','val_213'
-'214','val_214'
-'216','val_216'
-'217','val_217'
-'218','val_218'
-'219','val_219'
-'221','val_221'
-'222','val_222'
-'223','val_223'
-'224','val_224'
-'226','val_226'
-'228','val_228'
-'229','val_229'
-'230','val_230'
-'233','val_233'
-'235','val_235'
-'237','val_237'
-'238','val_238'
-'239','val_239'
-'24','val_24'
-'241','val_241'
-'242','val_242'
-'244','val_244'
-'247','val_247'
-'248','val_248'
-'249','val_249'
-'252','val_252'
-'255','val_255'
-'256','val_256'
-'257','val_257'
-'258','val_258'
-'26','val_26'
-'260','val_260'
-'262','val_262'
-'263','val_263'
-'265','val_265'
-'266','val_266'
-'27','val_27'
-'272','val_272'
-'273','val_273'
-'274','val_274'
-'275','val_275'
-'277','val_277'
-'278','val_278'
-'28','val_28'
-'280','val_280'
-'281','val_281'
-'282','val_282'
-'283','val_283'
-'284','val_284'
-'285','val_285'
-'286','val_286'
-'287','val_287'
-'288','val_288'
-'289','val_289'
-'291','val_291'
-'292','val_292'
-'296','val_296'
-'298','val_298'
-'30','val_30'
-'305','val_305'
-'306','val_306'
-'307','val_307'
-'308','val_308'
-'309','val_309'
-'310','val_310'
-'311','val_311'
-'315','val_315'
-'316','val_316'
-'317','val_317'
-'318','val_318'
-'321','val_321'
-'322','val_322'
-'323','val_323'
-'325','val_325'
-'327','val_327'
-'33','val_33'
-'331','val_331'
-'332','val_332'
-'333','val_333'
-'335','val_335'
-'336','val_336'
-'338','val_338'
-'339','val_339'
-'34','val_34'
-'341','val_341'
-'342','val_342'
-'344','val_344'
-'345','val_345'
-'348','val_348'
-'35','val_35'
-'351','val_351'
-'353','val_353'
-'356','val_356'
-'360','val_360'
-'362','val_362'
-'364','val_364'
-'365','val_365'
-'366','val_366'
-'367','val_367'
-'368','val_368'
-'369','val_369'
-'37','val_37'
-'373','val_373'
-'374','val_374'
-'375','val_375'
-'377','val_377'
-'378','val_378'
-'379','val_379'
-'382','val_382'
-'384','val_384'
-'386','val_386'
-'389','val_389'
-'392','val_392'
-'393','val_393'
-'394','val_394'
-'395','val_395'
-'396','val_396'
-'397','val_397'
-'399','val_399'
-'4','val_4'
-'400','val_400'
-'401','val_401'
-'402','val_402'
-'403','val_403'
-'404','val_404'
-'406','val_406'
-'407','val_407'
-'409','val_409'
-'41','val_41'
-'411','val_411'
-'413','val_413'
-'414','val_414'
-'417','val_417'
-'418','val_418'
-'419','val_419'
-'42','val_42'
-'421','val_421'
-'424','val_424'
-'427','val_427'
-'429','val_429'
-'43','val_43'
-'430','val_430'
-'431','val_431'
-'432','val_432'
-'435','val_435'
-'436','val_436'
-'437','val_437'
-'438','val_438'
-'439','val_439'
-'44','val_44'
-'443','val_443'
-'444','val_444'
-'446','val_446'
-'448','val_448'
-'449','val_449'
-'452','val_452'
-'453','val_453'
-'454','val_454'
-'455','val_455'
-'457','val_457'
-'458','val_458'
-'459','val_459'
-'460','val_460'
-'462','val_462'
-'463','val_463'
-'466','val_466'
-'467','val_467'
-'468','val_468'
-'469','val_469'
-'47','val_47'
-'470','val_470'
-'472','val_472'
-'475','val_475'
-'477','val_477'
-'478','val_478'
-'479','val_479'
-'480','val_480'
-'481','val_481'
-'482','val_482'
-'483','val_483'
-'484','val_484'
-'485','val_485'
-'487','val_487'
-'489','val_489'
-'490','val_490'
-'491','val_491'
-'492','val_492'
-'493','val_493'
-'494','val_494'
-'495','val_495'
-'496','val_496'
-'497','val_497'
-'498','val_498'
-'5','val_5'
-'51','val_51'
-'53','val_53'
-'54','val_54'
-'57','val_57'
-'58','val_58'
-'64','val_64'
-'65','val_65'
-'66','val_66'
-'67','val_67'
-'69','val_69'
-'70','val_70'
-'72','val_72'
-'74','val_74'
-'76','val_76'
-'77','val_77'
-'78','val_78'
-'8','val_8'
-'80','val_80'
-'82','val_82'
-'83','val_83'
-'84','val_84'
-'85','val_85'
-'86','val_86'
-'87','val_87'
-'9','val_9'
-'90','val_90'
-'92','val_92'
-'95','val_95'
-'96','val_96'
-'97','val_97'
-'98','val_98'
-308 rows selected 
->>>  
->>>  EXPLAIN SELECT key FROM src GROUP BY key HAVING max(value) > "val_255";
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key))) (TOK_GROUPBY (TOK_TABLE_OR_COL key)) (TOK_HAVING (> (TOK_FUNCTION max (TOK_TABLE_OR_COL value)) "val_255"))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            Select Operator'
-'              expressions:'
-'                    expr: key'
-'                    type: string'
-'                    expr: value'
-'                    type: string'
-'              outputColumnNames: key, value'
-'              Group By Operator'
-'                aggregations:'
-'                      expr: max(value)'
-'                bucketGroup: false'
-'                keys:'
-'                      expr: key'
-'                      type: string'
-'                mode: hash'
-'                outputColumnNames: _col0, _col1'
-'                Reduce Output Operator'
-'                  key expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                  sort order: +'
-'                  Map-reduce partition columns:'
-'                        expr: _col0'
-'                        type: string'
-'                  tag: -1'
-'                  value expressions:'
-'                        expr: _col1'
-'                        type: string'
-'      Reduce Operator Tree:'
-'        Group By Operator'
-'          aggregations:'
-'                expr: max(VALUE._col0)'
-'          bucketGroup: false'
-'          keys:'
-'                expr: KEY._col0'
-'                type: string'
-'          mode: mergepartial'
-'          outputColumnNames: _col0, _col1'
-'          Filter Operator'
-'            predicate:'
-'                expr: (_col1 > 'val_255')'
-'                type: boolean'
-'            Select Operator'
-'              expressions:'
-'                    expr: _col0'
-'                    type: string'
-'              outputColumnNames: _col0'
-'              File Output Operator'
-'                compressed: false'
-'                GlobalTableId: 0'
-'                table:'
-'                    input format: org.apache.hadoop.mapred.TextInputFormat'
-'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-73 rows selected 
->>>  SELECT key FROM src GROUP BY key HAVING max(value) > "val_255";
-'key'
-'256'
-'257'
-'258'
-'26'
-'260'
-'262'
-'263'
-'265'
-'266'
-'27'
-'272'
-'273'
-'274'
-'275'
-'277'
-'278'
-'28'
-'280'
-'281'
-'282'
-'283'
-'284'
-'285'
-'286'
-'287'
-'288'
-'289'
-'291'
-'292'
-'296'
-'298'
-'30'
-'302'
-'305'
-'306'
-'307'
-'308'
-'309'
-'310'
-'311'
-'315'
-'316'
-'317'
-'318'
-'321'
-'322'
-'323'
-'325'
-'327'
-'33'
-'331'
-'332'
-'333'
-'335'
-'336'
-'338'
-'339'
-'34'
-'341'
-'342'
-'344'
-'345'
-'348'
-'35'
-'351'
-'353'
-'356'
-'360'
-'362'
-'364'
-'365'
-'366'
-'367'
-'368'
-'369'
-'37'
-'373'
-'374'
-'375'
-'377'
-'378'
-'379'
-'382'
-'384'
-'386'
-'389'
-'392'
-'393'
-'394'
-'395'
-'396'
-'397'
-'399'
-'4'
-'400'
-'401'
-'402'
-'403'
-'404'
-'406'
-'407'
-'409'
-'41'
-'411'
-'413'
-'414'
-'417'
-'418'
-'419'
-'42'
-'421'
-'424'
-'427'
-'429'
-'43'
-'430'
-'431'
-'432'
-'435'
-'436'
-'437'
-'438'
-'439'
-'44'
-'443'
-'444'
-'446'
-'448'
-'449'
-'452'
-'453'
-'454'
-'455'
-'457'
-'458'
-'459'
-'460'
-'462'
-'463'
-'466'
-'467'
-'468'
-'469'
-'47'
-'470'
-'472'
-'475'
-'477'
-'478'
-'479'
-'480'
-'481'
-'482'
-'483'
-'484'
-'485'
-'487'
-'489'
-'490'
-'491'
-'492'
-'493'
-'494'
-'495'
-'496'
-'497'
-'498'
-'5'
-'51'
-'53'
-'54'
-'57'
-'58'
-'64'
-'65'
-'66'
-'67'
-'69'
-'70'
-'72'
-'74'
-'76'
-'77'
-'78'
-'8'
-'80'
-'82'
-'83'
-'84'
-'85'
-'86'
-'87'
-'9'
-'90'
-'92'
-'95'
-'96'
-'97'
-'98'
-199 rows selected 
->>>  
->>>  EXPLAIN SELECT key FROM src where key > 300 GROUP BY key HAVING max(value) > "val_255";
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key))) (TOK_WHERE (> (TOK_TABLE_OR_COL key) 300)) (TOK_GROUPBY (TOK_TABLE_OR_COL key)) (TOK_HAVING (> (TOK_FUNCTION max (TOK_TABLE_OR_COL value)) "val_255"))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (key > 300.0)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: key'
-'                      type: string'
-'                      expr: value'
-'                      type: string'
-'                outputColumnNames: key, value'
-'                Group By Operator'
-'                  aggregations:'
-'                        expr: max(value)'
-'                  bucketGroup: false'
-'                  keys:'
-'                        expr: key'
-'                        type: string'
-'                  mode: hash'
-'                  outputColumnNames: _col0, _col1'
-'                  Reduce Output Operator'
-'                    key expressions:'
-'                          expr: _col0'
-'                          type: string'
-'                    sort order: +'
-'                    Map-reduce partition columns:'
-'                          expr: _col0'
-'                          type: string'
-'                    tag: -1'
-'                    value expressions:'
-'                          expr: _col1'
-'                          type: string'
-'      Reduce Operator Tree:'
-'        Group By Operator'
-'          aggregations:'
-'                expr: max(VALUE._col0)'
-'          bucketGroup: false'
-'          keys:'
-'                expr: KEY._col0'
-'                type: string'
-'          mode: mergepartial'
-'          outputColumnNames: _col0, _col1'
-'          Filter Operator'
-'            predicate:'
-'                expr: (_col1 > 'val_255')'
-'                type: boolean'
-'            Select Operator'
-'              expressions:'
-'                    expr: _col0'
-'                    type: string'
-'              outputColumnNames: _col0'
-'              File Output Operator'
-'                compressed: false'
-'                GlobalTableId: 0'
-'                table:'
-'                    input format: org.apache.hadoop.mapred.TextInputFormat'
-'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-77 rows selected 
->>>  SELECT key FROM src where key > 300 GROUP BY key HAVING max(value) > "val_255";
-'key'
-'302'
-'305'
-'306'
-'307'
-'308'
-'309'
-'310'
-'311'
-'315'
-'316'
-'317'
-'318'
-'321'
-'322'
-'323'
-'325'
-'327'
-'331'
-'332'
-'333'
-'335'
-'336'
-'338'
-'339'
-'341'
-'342'
-'344'
-'345'
-'348'
-'351'
-'353'
-'356'
-'360'
-'362'
-'364'
-'365'
-'366'
-'367'
-'368'
-'369'
-'373'
-'374'
-'375'
-'377'
-'378'
-'379'
-'382'
-'384'
-'386'
-'389'
-'392'
-'393'
-'394'
-'395'
-'396'
-'397'
-'399'
-'400'
-'401'
-'402'
-'403'
-'404'
-'406'
-'407'
-'409'
-'411'
-'413'
-'414'
-'417'
-'418'
-'419'
-'421'
-'424'
-'427'
-'429'
-'430'
-'431'
-'432'
-'435'
-'436'
-'437'
-'438'
-'439'
-'443'
-'444'
-'446'
-'448'
-'449'
-'452'
-'453'
-'454'
-'455'
-'457'
-'458'
-'459'
-'460'
-'462'
-'463'
-'466'
-'467'
-'468'
-'469'
-'470'
-'472'
-'475'
-'477'
-'478'
-'479'
-'480'
-'481'
-'482'
-'483'
-'484'
-'485'
-'487'
-'489'
-'490'
-'491'
-'492'
-'493'
-'494'
-'495'
-'496'
-'497'
-'498'
-125 rows selected 
->>>  
->>>  EXPLAIN SELECT key, max(value) FROM src GROUP BY key HAVING max(value) > "val_255";
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION max (TOK_TABLE_OR_COL value)))) (TOK_GROUPBY (TOK_TABLE_OR_COL key)) (TOK_HAVING (> (TOK_FUNCTION max (TOK_TABLE_OR_COL value)) "val_255"))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            Select Operator'
-'              expressions:'
-'                    expr: key'
-'                    type: string'
-'                    expr: value'
-'                    type: string'
-'              outputColumnNames: key, value'
-'              Group By Operator'
-'                aggregations:'
-'                      expr: max(value)'
-'                bucketGroup: false'
-'                keys:'
-'                      expr: key'
-'                      type: string'
-'                mode: hash'
-'                outputColumnNames: _col0, _col1'
-'                Reduce Output Operator'
-'                  key expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                  sort order: +'
-'                  Map-reduce partition columns:'
-'                        expr: _col0'
-'                        type: string'
-'                  tag: -1'
-'                  value expressions:'
-'                        expr: _col1'
-'                        type: string'
-'      Reduce Operator Tree:'
-'        Group By Operator'
-'          aggregations:'
-'                expr: max(VALUE._col0)'
-'          bucketGroup: false'
-'          keys:'
-'                expr: KEY._col0'
-'                type: string'
-'          mode: mergepartial'
-'          outputColumnNames: _col0, _col1'
-'          Filter Operator'
-'            predicate:'
-'                expr: (_col1 > 'val_255')'
-'                type: boolean'
-'            Select Operator'
-'              expressions:'
-'                    expr: _col0'
-'                    type: string'
-'                    expr: _col1'
-'                    type: string'
-'              outputColumnNames: _col0, _col1'
-'              File Output Operator'
-'                compressed: false'
-'                GlobalTableId: 0'
-'                table:'
-'                    input format: org.apache.hadoop.mapred.TextInputFormat'
-'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-75 rows selected 
->>>  SELECT key, max(value) FROM src GROUP BY key HAVING max(value) > "val_255";
-'key','_c1'
-'256','val_256'
-'257','val_257'
-'258','val_258'
-'26','val_26'
-'260','val_260'
-'262','val_262'
-'263','val_263'
-'265','val_265'
-'266','val_266'
-'27','val_27'
-'272','val_272'
-'273','val_273'
-'274','val_274'
-'275','val_275'
-'277','val_277'
-'278','val_278'
-'28','val_28'
-'280','val_280'
-'281','val_281'
-'282','val_282'
-'283','val_283'
-'284','val_284'
-'285','val_285'
-'286','val_286'
-'287','val_287'
-'288','val_288'
-'289','val_289'
-'291','val_291'
-'292','val_292'
-'296','val_296'
-'298','val_298'
-'30','val_30'
-'302','val_302'
-'305','val_305'
-'306','val_306'
-'307','val_307'
-'308','val_308'
-'309','val_309'
-'310','val_310'
-'311','val_311'
-'315','val_315'
-'316','val_316'
-'317','val_317'
-'318','val_318'
-'321','val_321'
-'322','val_322'
-'323','val_323'
-'325','val_325'
-'327','val_327'
-'33','val_33'
-'331','val_331'
-'332','val_332'
-'333','val_333'
-'335','val_335'
-'336','val_336'
-'338','val_338'
-'339','val_339'
-'34','val_34'
-'341','val_341'
-'342','val_342'
-'344','val_344'
-'345','val_345'
-'348','val_348'
-'35','val_35'
-'351','val_351'
-'353','val_353'
-'356','val_356'
-'360','val_360'
-'362','val_362'
-'364','val_364'
-'365','val_365'
-'366','val_366'
-'367','val_367'
-'368','val_368'
-'369','val_369'
-'37','val_37'
-'373','val_373'
-'374','val_374'
-'375','val_375'
-'377','val_377'
-'378','val_378'
-'379','val_379'
-'382','val_382'
-'384','val_384'
-'386','val_386'
-'389','val_389'
-'392','val_392'
-'393','val_393'
-'394','val_394'
-'395','val_395'
-'396','val_396'
-'397','val_397'
-'399','val_399'
-'4','val_4'
-'400','val_400'
-'401','val_401'
-'402','val_402'
-'403','val_403'
-'404','val_404'
-'406','val_406'
-'407','val_407'
-'409','val_409'
-'41','val_41'
-'411','val_411'
-'413','val_413'
-'414','val_414'
-'417','val_417'
-'418','val_418'
-'419','val_419'
-'42','val_42'
-'421','val_421'
-'424','val_424'
-'427','val_427'
-'429','val_429'
-'43','val_43'
-'430','val_430'
-'431','val_431'
-'432','val_432'
-'435','val_435'
-'436','val_436'
-'437','val_437'
-'438','val_438'
-'439','val_439'
-'44','val_44'
-'443','val_443'
-'444','val_444'
-'446','val_446'
-'448','val_448'
-'449','val_449'
-'452','val_452'
-'453','val_453'
-'454','val_454'
-'455','val_455'
-'457','val_457'
-'458','val_458'
-'459','val_459'
-'460','val_460'
-'462','val_462'
-'463','val_463'
-'466','val_466'
-'467','val_467'
-'468','val_468'
-'469','val_469'
-'47','val_47'
-'470','val_470'
-'472','val_472'
-'475','val_475'
-'477','val_477'
-'478','val_478'
-'479','val_479'
-'480','val_480'
-'481','val_481'
-'482','val_482'
-'483','val_483'
-'484','val_484'
-'485','val_485'
-'487','val_487'
-'489','val_489'
-'490','val_490'
-'491','val_491'
-'492','val_492'
-'493','val_493'
-'494','val_494'
-'495','val_495'
-'496','val_496'
-'497','val_497'
-'498','val_498'
-'5','val_5'
-'51','val_51'
-'53','val_53'
-'54','val_54'
-'57','val_57'
-'58','val_58'
-'64','val_64'
-'65','val_65'
-'66','val_66'
-'67','val_67'
-'69','val_69'
-'70','val_70'
-'72','val_72'
-'74','val_74'
-'76','val_76'
-'77','val_77'
-'78','val_78'
-'8','val_8'
-'80','val_80'
-'82','val_82'
-'83','val_83'
-'84','val_84'
-'85','val_85'
-'86','val_86'
-'87','val_87'
-'9','val_9'
-'90','val_90'
-'92','val_92'
-'95','val_95'
-'96','val_96'
-'97','val_97'
-'98','val_98'
-199 rows selected 
->>>  !record
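
Note on the file above: having.q.out records how Hive compiles HAVING. In each plan the predicate becomes a Filter Operator placed after the mergepartial Group By Operator on the reduce side, so it filters aggregated groups rather than raw rows, while a plain column predicate such as key != 302 is pushed down to a map-side Filter Operator instead. A minimal sketch of the pattern the file exercises, assuming the standard src(key STRING, value STRING) test table:

    -- HAVING may name a select-list alias (c) or an aggregate directly;
    -- both lower to the same post-aggregation Filter Operator.
    SELECT key, count(value) AS c
    FROM src
    GROUP BY key
    HAVING c > 3;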

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/hook_context_cs.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/hook_context_cs.q.out b/ql/src/test/results/beelinepositive/hook_context_cs.q.out
deleted file mode 100644
index 516c6a9..0000000
--- a/ql/src/test/results/beelinepositive/hook_context_cs.q.out
+++ /dev/null
@@ -1,30 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/hook_context_cs.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/hook_context_cs.q
->>>  drop table vcsc;
-No rows affected 
->>>  CREATE TABLE vcsc (c STRING) PARTITIONED BY (ds STRING);
-No rows affected 
->>>  ALTER TABLE vcsc ADD partition (ds='dummy') location '${system:test.tmp.dir}/VerifyContentSummaryCacheHook';
-No rows affected 
->>>  
->>>  set hive.exec.pre.hooks=org.apache.hadoop.hive.ql.hooks.VerifyContentSummaryCacheHook;
-No rows affected 
->>>  SELECT a.c, b.c FROM vcsc a JOIN vcsc b ON a.ds = 'dummy' AND b.ds = 'dummy' AND a.c = b.c;
-'c','c'
-No rows selected 
->>>  
->>>  set mapred.job.tracker=local;
-No rows affected 
->>>  set hive.exec.pre.hooks = ;
-No rows affected 
->>>  set hive.exec.post.hooks=org.apache.hadoop.hive.ql.hooks.VerifyContentSummaryCacheHook;
-No rows affected 
->>>  SELECT a.c, b.c FROM vcsc a JOIN vcsc b ON a.ds = 'dummy' AND b.ds = 'dummy' AND a.c = b.c;
-'c','c'
-No rows selected 
->>>  
->>>  set hive.exec.post.hooks=;
-No rows affected 
->>>  drop table vcsc;
-No rows affected 
->>>  !record
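
Note on the file above: hook_context_cs.q.out shows the execution-hook lifecycle. A hook class is installed by setting hive.exec.pre.hooks (or hive.exec.post.hooks) to its fully qualified name, subsequent queries trigger it, and setting the property back to empty uninstalls it. A minimal sketch of that toggle, assuming the hook class is on the classpath:

    set hive.exec.pre.hooks=org.apache.hadoop.hive.ql.hooks.VerifyContentSummaryCacheHook;
    SELECT count(*) FROM src;   -- the pre-hook runs before this query executes
    set hive.exec.pre.hooks=;   -- an empty value clears the hook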

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/hook_order.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/hook_order.q.out b/ql/src/test/results/beelinepositive/hook_order.q.out
deleted file mode 100644
index f1d8e4c..0000000
--- a/ql/src/test/results/beelinepositive/hook_order.q.out
+++ /dev/null
@@ -1,25 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/hook_order.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/hook_order.q
->>>  SET hive.exec.pre.hooks=org.apache.hadoop.hive.ql.hooks.VerifyHooksRunInOrder$RunFirst,org.apache.hadoop.hive.ql.hooks.VerifyHooksRunInOrder$RunSecond;
-No rows affected 
->>>  SET hive.exec.post.hooks=org.apache.hadoop.hive.ql.hooks.VerifyHooksRunInOrder$RunFirst,org.apache.hadoop.hive.ql.hooks.VerifyHooksRunInOrder$RunSecond;
-No rows affected 
->>>  SET hive.semantic.analyzer.hook=org.apache.hadoop.hive.ql.hooks.VerifyHooksRunInOrder$RunFirstSemanticAnalysisHook,org.apache.hadoop.hive.ql.hooks.VerifyHooksRunInOrder$RunSecondSemanticAnalysisHook;
-No rows affected 
->>>  SET hive.exec.driver.run.hooks=org.apache.hadoop.hive.ql.hooks.VerifyHooksRunInOrder$RunFirstDriverRunHook,org.apache.hadoop.hive.ql.hooks.VerifyHooksRunInOrder$RunSecondDriverRunHook;
-No rows affected 
->>>  
->>>  SELECT count(*) FROM src;
-'_c0'
-'500'
-1 row selected 
->>>  
->>>  SET hive.exec.pre.hooks=;
-No rows affected 
->>>  SET hive.exec.post.hooks=;
-No rows affected 
->>>  SET hive.semantic.analyzer.hook=;
-No rows affected 
->>>  SET hive.exec.driver.run.hooks=;
-No rows affected 
->>>  !record
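
Note on the file above: hook_order.q.out verifies that each hook property accepts a comma-separated list of classes and runs them in the listed order; the file applies the same pattern to hive.exec.pre.hooks, hive.exec.post.hooks, hive.semantic.analyzer.hook, and hive.exec.driver.run.hooks. A sketch with two hypothetical hook classes (FirstHook and SecondHook are illustrative names, not real Hive classes):

    -- com.example.FirstHook fires before com.example.SecondHook on every query
    SET hive.exec.pre.hooks=com.example.FirstHook,com.example.SecondHook;
    SELECT count(*) FROM src;
    SET hive.exec.pre.hooks=;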

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/implicit_cast1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/implicit_cast1.q.out b/ql/src/test/results/beelinepositive/implicit_cast1.q.out
deleted file mode 100644
index a37c32b..0000000
--- a/ql/src/test/results/beelinepositive/implicit_cast1.q.out
+++ /dev/null
@@ -1,58 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/implicit_cast1.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/implicit_cast1.q
->>>  CREATE TABLE implicit_test1(a BIGINT, b STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe' WITH SERDEPROPERTIES('serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol') STORED AS TEXTFILE;
-No rows affected 
->>>  
->>>  EXPLAIN 
-SELECT implicit_test1.* 
-FROM implicit_test1 
-WHERE implicit_test1.a <> 0;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME implicit_test1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME implicit_test1)))) (TOK_WHERE (<> (. (TOK_TABLE_OR_COL implicit_test1) a) 0))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        implicit_test1 '
-'          TableScan'
-'            alias: implicit_test1'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (a <> 0)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: a'
-'                      type: bigint'
-'                      expr: b'
-'                      type: string'
-'                outputColumnNames: _col0, _col1'
-'                File Output Operator'
-'                  compressed: false'
-'                  GlobalTableId: 0'
-'                  table:'
-'                      input format: org.apache.hadoop.mapred.TextInputFormat'
-'                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-37 rows selected 
->>>  
->>>  SELECT implicit_test1.* 
-FROM implicit_test1 
-WHERE implicit_test1.a <> 0;
-'a','b'
-No rows selected 
->>>  
->>>  
->>>  
->>>  !record
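
Note on the file above: implicit_cast1.q.out checks numeric widening. Column a is BIGINT while the literal 0 is INT, and the plan's predicate (a <> 0) shows the comparison proceeding without an explicit cast because Hive promotes the narrower type. A sketch of the equivalent explicit form, using the implicit_test1 table defined in the file:

    -- these two predicates should behave the same; the CAST below spells out
    -- what Hive applies implicitly when comparing BIGINT to an INT literal
    SELECT * FROM implicit_test1 WHERE a <> 0;
    SELECT * FROM implicit_test1 WHERE a <> CAST(0 AS BIGINT);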

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/index_auto_file_format.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/index_auto_file_format.q.out b/ql/src/test/results/beelinepositive/index_auto_file_format.q.out
deleted file mode 100644
index dab5389..0000000
--- a/ql/src/test/results/beelinepositive/index_auto_file_format.q.out
+++ /dev/null
@@ -1,301 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/index_auto_file_format.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/index_auto_file_format.q
->>>  -- test automatic use of index on different file formats
->>>  CREATE INDEX src_index ON TABLE src(key) as 'COMPACT' WITH DEFERRED REBUILD;
-No rows affected 
->>>  ALTER INDEX src_index ON src REBUILD;
-No rows affected 
->>>  
->>>  SET hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
-No rows affected 
->>>  SET hive.optimize.index.filter=true;
-No rows affected 
->>>  SET hive.optimize.index.filter.compact.minsize=0;
-No rows affected 
->>>  
->>>  EXPLAIN SELECT key, value FROM src WHERE key=86 ORDER BY key;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 86)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-3 is a root stage'
-'  Stage-8 depends on stages: Stage-3 , consists of Stage-5, Stage-4, Stage-6'
-'  Stage-5'
-'  Stage-2 depends on stages: Stage-5, Stage-4, Stage-7'
-'  Stage-1 depends on stages: Stage-2'
-'  Stage-4'
-'  Stage-6'
-'  Stage-7 depends on stages: Stage-6'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-3'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        index_auto_file_format__src_src_index__ '
-'          TableScan'
-'            alias: index_auto_file_format__src_src_index__'
-'            filterExpr:'
-'                expr: (key = 86.0)'
-'                type: boolean'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (key = 86.0)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: _bucketname'
-'                      type: string'
-'                      expr: _offsets'
-'                      type: array<bigint>'
-'                outputColumnNames: _col0, _col1'
-'                File Output Operator'
-'                  compressed: false'
-'                  GlobalTableId: 1'
-'                  table:'
-'                      input format: org.apache.hadoop.mapred.TextInputFormat'
-'                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-8'
-'    Conditional Operator'
-''
-'  Stage: Stage-5'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: file:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-2'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: file:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            filterExpr:'
-'                expr: (key = 86.0)'
-'                type: boolean'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (key = 86.0)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: key'
-'                      type: string'
-'                      expr: value'
-'                      type: string'
-'                outputColumnNames: _col0, _col1'
-'                Reduce Output Operator'
-'                  key expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                  sort order: +'
-'                  tag: -1'
-'                  value expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                        expr: _col1'
-'                        type: string'
-'      Reduce Operator Tree:'
-'        Extract'
-'          File Output Operator'
-'            compressed: false'
-'            GlobalTableId: 0'
-'            table:'
-'                input format: org.apache.hadoop.mapred.TextInputFormat'
-'                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-4'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        file:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-6'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        file:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-7'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: file:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-130 rows selected 
->>>  SELECT key, value FROM src WHERE key=86 ORDER BY key;
-'key','value'
-'86','val_86'
-1 row selected 
->>>  
->>>  SET hive.input.format=org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
-No rows affected 
->>>  SET hive.optimize.index.filter=true;
-No rows affected 
->>>  SET hive.optimize.index.filter.compact.minsize=0;
-No rows affected 
->>>  
->>>  EXPLAIN SELECT key, value FROM src WHERE key=86 ORDER BY key;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 86)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-3 is a root stage'
-'  Stage-8 depends on stages: Stage-3 , consists of Stage-5, Stage-4, Stage-6'
-'  Stage-5'
-'  Stage-2 depends on stages: Stage-5, Stage-4, Stage-7'
-'  Stage-1 depends on stages: Stage-2'
-'  Stage-4'
-'  Stage-6'
-'  Stage-7 depends on stages: Stage-6'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-3'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        index_auto_file_format__src_src_index__ '
-'          TableScan'
-'            alias: index_auto_file_format__src_src_index__'
-'            filterExpr:'
-'                expr: (key = 86.0)'
-'                type: boolean'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (key = 86.0)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: _bucketname'
-'                      type: string'
-'                      expr: _offsets'
-'                      type: array<bigint>'
-'                outputColumnNames: _col0, _col1'
-'                File Output Operator'
-'                  compressed: false'
-'                  GlobalTableId: 1'
-'                  table:'
-'                      input format: org.apache.hadoop.mapred.TextInputFormat'
-'                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-8'
-'    Conditional Operator'
-''
-'  Stage: Stage-5'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: file:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-2'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: file:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            filterExpr:'
-'                expr: (key = 86.0)'
-'                type: boolean'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (key = 86.0)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: key'
-'                      type: string'
-'                      expr: value'
-'                      type: string'
-'                outputColumnNames: _col0, _col1'
-'                Reduce Output Operator'
-'                  key expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                  sort order: +'
-'                  tag: -1'
-'                  value expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                        expr: _col1'
-'                        type: string'
-'      Reduce Operator Tree:'
-'        Extract'
-'          File Output Operator'
-'            compressed: false'
-'            GlobalTableId: 0'
-'            table:'
-'                input format: org.apache.hadoop.mapred.TextInputFormat'
-'                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-4'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        file:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-6'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        file:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-7'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: file:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-130 rows selected 
->>>  SELECT key, value FROM src WHERE key=86 ORDER BY key;
-'key','value'
-'86','val_86'
-1 row selected 
->>>  
->>>  DROP INDEX src_index on src;
-No rows affected 
->>>  !record
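
Note on the file above: index_auto_file_format.q.out walks the compact-index lifecycle and the switches that let the optimizer answer a filter through the index; the two EXPLAIN runs confirm the same index-backed plan under both HiveInputFormat and CombineHiveInputFormat. The essential sequence from the file, condensed:

    CREATE INDEX src_index ON TABLE src(key) AS 'COMPACT' WITH DEFERRED REBUILD;
    ALTER INDEX src_index ON src REBUILD;             -- populate the index table
    SET hive.optimize.index.filter=true;              -- allow index-based rewrites
    SET hive.optimize.index.filter.compact.minsize=0; -- rewrite even tiny inputs
    SELECT key, value FROM src WHERE key=86 ORDER BY key;
    DROP INDEX src_index ON src;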

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/index_auto_mult_tables.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/index_auto_mult_tables.q.out b/ql/src/test/results/beelinepositive/index_auto_mult_tables.q.out
deleted file mode 100644
index e0ce5dd..0000000
--- a/ql/src/test/results/beelinepositive/index_auto_mult_tables.q.out
+++ /dev/null
@@ -1,530 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/index_auto_mult_tables.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/index_auto_mult_tables.q
->>>  -- try the query without indexing, with manual indexing, and with automatic indexing
->>>  
->>>  -- without indexing
->>>  EXPLAIN SELECT a.key, a.value FROM src a JOIN srcpart b ON (a.key = b.key) WHERE a.key > 80 AND a.key < 100 AND b.key > 70 AND b.key < 90 ORDER BY a.key;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME src) a) (TOK_TABREF (TOK_TABNAME srcpart) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value))) (TOK_WHERE (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) 80) (< (. (TOK_TABLE_OR_COL a) key) 100)) (> (. (TOK_TABLE_OR_COL b) key) 70)) (< (. (TOK_TABLE_OR_COL b) key) 90))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL a) key)))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-2 depends on stages: Stage-1'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        a '
-'          TableScan'
-'            alias: a'
-'            Filter Operator'
-'              predicate:'
-'                  expr: ((((key > 80.0) and (key < 100.0)) and (key > 70.0)) and (key < 90.0))'
-'                  type: boolean'
-'              Reduce Output Operator'
-'                key expressions:'
-'                      expr: key'
-'                      type: string'
-'                sort order: +'
-'                Map-reduce partition columns:'
-'                      expr: key'
-'                      type: string'
-'                tag: 0'
-'                value expressions:'
-'                      expr: key'
-'                      type: string'
-'                      expr: value'
-'                      type: string'
-'        b '
-'          TableScan'
-'            alias: b'
-'            Filter Operator'
-'              predicate:'
-'                  expr: ((((key > 70.0) and (key < 90.0)) and (key > 80.0)) and (key < 100.0))'
-'                  type: boolean'
-'              Reduce Output Operator'
-'                key expressions:'
-'                      expr: key'
-'                      type: string'
-'                sort order: +'
-'                Map-reduce partition columns:'
-'                      expr: key'
-'                      type: string'
-'                tag: 1'
-'                value expressions:'
-'                      expr: key'
-'                      type: string'
-'      Reduce Operator Tree:'
-'        Join Operator'
-'          condition map:'
-'               Inner Join 0 to 1'
-'          condition expressions:'
-'            0 {VALUE._col0} {VALUE._col1}'
-'            1 {VALUE._col0}'
-'          handleSkewJoin: false'
-'          outputColumnNames: _col0, _col1, _col4'
-'          Select Operator'
-'            expressions:'
-'                  expr: _col0'
-'                  type: string'
-'                  expr: _col1'
-'                  type: string'
-'            outputColumnNames: _col0, _col1'
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'
-''
-'  Stage: Stage-2'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        file:!!{hive.exec.scratchdir}!! '
-'            Reduce Output Operator'
-'              key expressions:'
-'                    expr: _col0'
-'                    type: string'
-'              sort order: +'
-'              tag: -1'
-'              value expressions:'
-'                    expr: _col0'
-'                    type: string'
-'                    expr: _col1'
-'                    type: string'
-'      Reduce Operator Tree:'
-'        Extract'
-'          File Output Operator'
-'            compressed: false'
-'            GlobalTableId: 0'
-'            table:'
-'                input format: org.apache.hadoop.mapred.TextInputFormat'
-'                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-104 rows selected 
->>>  SELECT a.key, a.value FROM src a JOIN srcpart b ON (a.key = b.key) WHERE a.key > 80 AND a.key < 100 AND b.key > 70 AND b.key < 90 ORDER BY a.key;
-'key','value'
-'82','val_82'
-'82','val_82'
-'82','val_82'
-'82','val_82'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'85','val_85'
-'85','val_85'
-'85','val_85'
-'85','val_85'
-'86','val_86'
-'86','val_86'
-'86','val_86'
-'86','val_86'
-'87','val_87'
-'87','val_87'
-'87','val_87'
-'87','val_87'
-48 rows selected 
->>>  
->>>  
->>>  CREATE INDEX src_index ON TABLE src(key) as 'BITMAP' WITH DEFERRED REBUILD;
-No rows affected 
->>>  ALTER INDEX src_index ON src REBUILD;
-No rows affected 
->>>  
->>>  CREATE INDEX srcpart_index ON TABLE srcpart(key) as 'BITMAP' WITH DEFERRED REBUILD;
-No rows affected 
->>>  ALTER INDEX srcpart_index ON srcpart REBUILD;
-No rows affected 
->>>  
->>>  SET hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
-No rows affected 
->>>  SET hive.optimize.index.filter=true;
-No rows affected 
->>>  SET hive.optimize.index.filter.compact.minsize=0;
-No rows affected 
->>>  
->>>  EXPLAIN SELECT a.key, a.value FROM src a JOIN srcpart b ON (a.key = b.key) WHERE a.key > 80 AND a.key < 100 AND b.key > 70 AND b.key < 90 ORDER BY a.key;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME src) a) (TOK_TABREF (TOK_TABNAME srcpart) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value))) (TOK_WHERE (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) 80) (< (. (TOK_TABLE_OR_COL a) key) 100)) (> (. (TOK_TABLE_OR_COL b) key) 70)) (< (. (TOK_TABLE_OR_COL b) key) 90))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL a) key)))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-5 is a root stage'
-'  Stage-4 depends on stages: Stage-5'
-'  Stage-1 depends on stages: Stage-4, Stage-6'
-'  Stage-2 depends on stages: Stage-1'
-'  Stage-7 is a root stage'
-'  Stage-6 depends on stages: Stage-7'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-5'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        tmp_index:ind0:index_auto_mult_tables__srcpart_srcpart_index__ '
-'          TableScan'
-'            alias: index_auto_mult_tables__srcpart_srcpart_index__'
-'            filterExpr:'
-'                expr: (((((key > 70.0) and (key < 90.0)) and (key > 80.0)) and (key < 100.0)) and (not EWAH_BITMAP_EMPTY(_bitmaps)))'
-'                type: boolean'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (((((key > 70.0) and (key < 90.0)) and (key > 80.0)) and (key < 100.0)) and (not EWAH_BITMAP_EMPTY(_bitmaps)))'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: _bucketname'
-'                      type: string'
-'                      expr: _offset'
-'                      type: bigint'
-'                      expr: _bitmaps'
-'                      type: array<bigint>'
-'                outputColumnNames: _col1, _col2, _col3'
-'                Select Operator'
-'                  expressions:'
-'                        expr: _col1'
-'                        type: string'
-'                        expr: _col2'
-'                        type: bigint'
-'                  outputColumnNames: _col0, _col1'
-'                  Select Operator'
-'                    expressions:'
-'                          expr: _col0'
-'                          type: string'
-'                          expr: _col1'
-'                          type: bigint'
-'                    outputColumnNames: _col0, _col1'
-'                    Group By Operator'
-'                      aggregations:'
-'                            expr: collect_set(_col1)'
-'                      bucketGroup: false'
-'                      keys:'
-'                            expr: _col0'
-'                            type: string'
-'                      mode: hash'
-'                      outputColumnNames: _col0, _col1'
-'                      Reduce Output Operator'
-'                        key expressions:'
-'                              expr: _col0'
-'                              type: string'
-'                        sort order: +'
-'                        Map-reduce partition columns:'
-'                              expr: _col0'
-'                              type: string'
-'                        tag: -1'
-'                        value expressions:'
-'                              expr: _col1'
-'                              type: array<bigint>'
-'      Reduce Operator Tree:'
-'        Group By Operator'
-'          aggregations:'
-'                expr: collect_set(VALUE._col0)'
-'          bucketGroup: false'
-'          keys:'
-'                expr: KEY._col0'
-'                type: string'
-'          mode: mergepartial'
-'          outputColumnNames: _col0, _col1'
-'          Select Operator'
-'            expressions:'
-'                  expr: _col0'
-'                  type: string'
-'                  expr: _col1'
-'                  type: array<bigint>'
-'            outputColumnNames: _col0, _col1'
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 1'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-4'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: file:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        a '
-'          TableScan'
-'            alias: a'
-'            filterExpr:'
-'                expr: ((((key > 80.0) and (key < 100.0)) and (key > 70.0)) and (key < 90.0))'
-'                type: boolean'
-'            Filter Operator'
-'              predicate:'
-'                  expr: ((((key > 80.0) and (key < 100.0)) and (key > 70.0)) and (key < 90.0))'
-'                  type: boolean'
-'              Reduce Output Operator'
-'                key expressions:'
-'                      expr: key'
-'                      type: string'
-'                sort order: +'
-'                Map-reduce partition columns:'
-'                      expr: key'
-'                      type: string'
-'                tag: 0'
-'                value expressions:'
-'                      expr: key'
-'                      type: string'
-'                      expr: value'
-'                      type: string'
-'        b '
-'          TableScan'
-'            alias: b'
-'            filterExpr:'
-'                expr: ((((key > 70.0) and (key < 90.0)) and (key > 80.0)) and (key < 100.0))'
-'                type: boolean'
-'            Filter Operator'
-'              predicate:'
-'                  expr: ((((key > 70.0) and (key < 90.0)) and (key > 80.0)) and (key < 100.0))'
-'                  type: boolean'
-'              Reduce Output Operator'
-'                key expressions:'
-'                      expr: key'
-'                      type: string'
-'                sort order: +'
-'                Map-reduce partition columns:'
-'                      expr: key'
-'                      type: string'
-'                tag: 1'
-'                value expressions:'
-'                      expr: key'
-'                      type: string'
-'      Reduce Operator Tree:'
-'        Join Operator'
-'          condition map:'
-'               Inner Join 0 to 1'
-'          condition expressions:'
-'            0 {VALUE._col0} {VALUE._col1}'
-'            1 {VALUE._col0}'
-'          handleSkewJoin: false'
-'          outputColumnNames: _col0, _col1, _col4'
-'          Select Operator'
-'            expressions:'
-'                  expr: _col0'
-'                  type: string'
-'                  expr: _col1'
-'                  type: string'
-'            outputColumnNames: _col0, _col1'
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'
-''
-'  Stage: Stage-2'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        file:!!{hive.exec.scratchdir}!! '
-'            Reduce Output Operator'
-'              key expressions:'
-'                    expr: _col0'
-'                    type: string'
-'              sort order: +'
-'              tag: -1'
-'              value expressions:'
-'                    expr: _col0'
-'                    type: string'
-'                    expr: _col1'
-'                    type: string'
-'      Reduce Operator Tree:'
-'        Extract'
-'          File Output Operator'
-'            compressed: false'
-'            GlobalTableId: 0'
-'            table:'
-'                input format: org.apache.hadoop.mapred.TextInputFormat'
-'                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-7'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        tmp_index:ind0:index_auto_mult_tables__src_src_index__ '
-'          TableScan'
-'            alias: index_auto_mult_tables__src_src_index__'
-'            filterExpr:'
-'                expr: (((((key > 80.0) and (key < 100.0)) and (key > 70.0)) and (key < 90.0)) and (not EWAH_BITMAP_EMPTY(_bitmaps)))'
-'                type: boolean'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (((((key > 80.0) and (key < 100.0)) and (key > 70.0)) and (key < 90.0)) and (not EWAH_BITMAP_EMPTY(_bitmaps)))'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: _bucketname'
-'                      type: string'
-'                      expr: _offset'
-'                      type: bigint'
-'                      expr: _bitmaps'
-'                      type: array<bigint>'
-'                outputColumnNames: _col1, _col2, _col3'
-'                Select Operator'
-'                  expressions:'
-'                        expr: _col1'
-'                        type: string'
-'                        expr: _col2'
-'                        type: bigint'
-'                  outputColumnNames: _col0, _col1'
-'                  Select Operator'
-'                    expressions:'
-'                          expr: _col0'
-'                          type: string'
-'                          expr: _col1'
-'                          type: bigint'
-'                    outputColumnNames: _col0, _col1'
-'                    Group By Operator'
-'                      aggregations:'
-'                            expr: collect_set(_col1)'
-'                      bucketGroup: false'
-'                      keys:'
-'                            expr: _col0'
-'                            type: string'
-'                      mode: hash'
-'                      outputColumnNames: _col0, _col1'
-'                      Reduce Output Operator'
-'                        key expressions:'
-'                              expr: _col0'
-'                              type: string'
-'                        sort order: +'
-'                        Map-reduce partition columns:'
-'                              expr: _col0'
-'                              type: string'
-'                        tag: -1'
-'                        value expressions:'
-'                              expr: _col1'
-'                              type: array<bigint>'
-'      Reduce Operator Tree:'
-'        Group By Operator'
-'          aggregations:'
-'                expr: collect_set(VALUE._col0)'
-'          bucketGroup: false'
-'          keys:'
-'                expr: KEY._col0'
-'                type: string'
-'          mode: mergepartial'
-'          outputColumnNames: _col0, _col1'
-'          Select Operator'
-'            expressions:'
-'                  expr: _col0'
-'                  type: string'
-'                  expr: _col1'
-'                  type: array<bigint>'
-'            outputColumnNames: _col0, _col1'
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 1'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-6'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: file:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-288 rows selected 
->>>  SELECT a.key, a.value FROM src a JOIN srcpart b ON (a.key = b.key) WHERE a.key > 80 AND a.key < 100 AND b.key > 70 AND b.key < 90 ORDER BY a.key;
-'key','value'
-'82','val_82'
-'82','val_82'
-'82','val_82'
-'82','val_82'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'85','val_85'
-'85','val_85'
-'85','val_85'
-'85','val_85'
-'86','val_86'
-'86','val_86'
-'86','val_86'
-'86','val_86'
-'87','val_87'
-'87','val_87'
-'87','val_87'
-'87','val_87'
-48 rows selected 
->>>  
->>>  DROP INDEX src_index on src;
-No rows affected 
->>>  DROP INDEX srcpart_index on src;
-No rows affected 
->>>  !record
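
The hunk above removes the golden output for index_auto_mult_tables.q, which
exercised automatic query rewriting against EWAH bitmap indexes on both src and
srcpart. The CREATE INDEX statements fall outside this hunk, so the following is
only a sketch of the setup the plan implies (the 'BITMAP' DDL is inferred from
the EWAH_BITMAP_EMPTY(_bitmaps) filter and the collect_set aggregation over
_offset in Stage-7; the index names src_index and srcpart_index are taken from
the DROP INDEX statements recorded above):

  -- sketch: setup inferred from the deleted plan, not copied from the file
  CREATE INDEX src_index ON TABLE src(key) AS 'BITMAP' WITH DEFERRED REBUILD;
  ALTER INDEX src_index ON src REBUILD;
  CREATE INDEX srcpart_index ON TABLE srcpart(key) AS 'BITMAP' WITH DEFERRED REBUILD;
  ALTER INDEX srcpart_index ON srcpart REBUILD;
  SET hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
  SET hive.optimize.index.filter=true;

With that in place, Stage-7 scans the index table instead of src, keeps rows
whose bitmap is non-empty via not EWAH_BITMAP_EMPTY(_bitmaps), collects the
matching _offset values into an array<bigint>, and the Move Operators stage the
result into the scratch directory for the main join job to consume.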

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/index_auto_mult_tables_compact.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/index_auto_mult_tables_compact.q.out b/ql/src/test/results/beelinepositive/index_auto_mult_tables_compact.q.out
deleted file mode 100644
index bc4c9dd..0000000
--- a/ql/src/test/results/beelinepositive/index_auto_mult_tables_compact.q.out
+++ /dev/null
@@ -1,507 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/index_auto_mult_tables_compact.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/index_auto_mult_tables_compact.q
->>>  -- try the query without indexing, with manual indexing, and with automatic indexing
->>>  
->>>  -- without indexing
->>>  EXPLAIN SELECT a.key, a.value FROM src a JOIN srcpart b ON (a.key = b.key) WHERE a.key > 80 AND a.key < 100 AND b.key > 70 AND b.key < 90 ORDER BY a.key;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME src) a) (TOK_TABREF (TOK_TABNAME srcpart) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value))) (TOK_WHERE (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) 80) (< (. (TOK_TABLE_OR_COL a) key) 100)) (> (. (TOK_TABLE_OR_COL b) key) 70)) (< (. (TOK_TABLE_OR_COL b) key) 90))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL a) key)))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-2 depends on stages: Stage-1'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        a '
-'          TableScan'
-'            alias: a'
-'            Filter Operator'
-'              predicate:'
-'                  expr: ((((key > 80.0) and (key < 100.0)) and (key > 70.0)) and (key < 90.0))'
-'                  type: boolean'
-'              Reduce Output Operator'
-'                key expressions:'
-'                      expr: key'
-'                      type: string'
-'                sort order: +'
-'                Map-reduce partition columns:'
-'                      expr: key'
-'                      type: string'
-'                tag: 0'
-'                value expressions:'
-'                      expr: key'
-'                      type: string'
-'                      expr: value'
-'                      type: string'
-'        b '
-'          TableScan'
-'            alias: b'
-'            Filter Operator'
-'              predicate:'
-'                  expr: ((((key > 70.0) and (key < 90.0)) and (key > 80.0)) and (key < 100.0))'
-'                  type: boolean'
-'              Reduce Output Operator'
-'                key expressions:'
-'                      expr: key'
-'                      type: string'
-'                sort order: +'
-'                Map-reduce partition columns:'
-'                      expr: key'
-'                      type: string'
-'                tag: 1'
-'                value expressions:'
-'                      expr: key'
-'                      type: string'
-'      Reduce Operator Tree:'
-'        Join Operator'
-'          condition map:'
-'               Inner Join 0 to 1'
-'          condition expressions:'
-'            0 {VALUE._col0} {VALUE._col1}'
-'            1 {VALUE._col0}'
-'          handleSkewJoin: false'
-'          outputColumnNames: _col0, _col1, _col4'
-'          Select Operator'
-'            expressions:'
-'                  expr: _col0'
-'                  type: string'
-'                  expr: _col1'
-'                  type: string'
-'            outputColumnNames: _col0, _col1'
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'
-''
-'  Stage: Stage-2'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        file:!!{hive.exec.scratchdir}!! '
-'            Reduce Output Operator'
-'              key expressions:'
-'                    expr: _col0'
-'                    type: string'
-'              sort order: +'
-'              tag: -1'
-'              value expressions:'
-'                    expr: _col0'
-'                    type: string'
-'                    expr: _col1'
-'                    type: string'
-'      Reduce Operator Tree:'
-'        Extract'
-'          File Output Operator'
-'            compressed: false'
-'            GlobalTableId: 0'
-'            table:'
-'                input format: org.apache.hadoop.mapred.TextInputFormat'
-'                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-104 rows selected 
->>>  SELECT a.key, a.value FROM src a JOIN srcpart b ON (a.key = b.key) WHERE a.key > 80 AND a.key < 100 AND b.key > 70 AND b.key < 90 ORDER BY a.key;
-'key','value'
-'82','val_82'
-'82','val_82'
-'82','val_82'
-'82','val_82'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'85','val_85'
-'85','val_85'
-'85','val_85'
-'85','val_85'
-'86','val_86'
-'86','val_86'
-'86','val_86'
-'86','val_86'
-'87','val_87'
-'87','val_87'
-'87','val_87'
-'87','val_87'
-48 rows selected 
->>>  
->>>  
->>>  CREATE INDEX src_index ON TABLE src(key) as 'COMPACT' WITH DEFERRED REBUILD;
-No rows affected 
->>>  ALTER INDEX src_index ON src REBUILD;
-No rows affected 
->>>  
->>>  CREATE INDEX srcpart_index ON TABLE srcpart(key) as 'COMPACT' WITH DEFERRED REBUILD;
-No rows affected 
->>>  ALTER INDEX srcpart_index ON srcpart REBUILD;
-No rows affected 
->>>  
->>>  SET hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
-No rows affected 
->>>  SET hive.optimize.index.filter=true;
-No rows affected 
->>>  SET hive.optimize.index.filter.compact.minsize=0;
-No rows affected 
->>>  
->>>  -- automatic indexing
->>>  EXPLAIN SELECT a.key, a.value FROM src a JOIN srcpart b ON (a.key = b.key) WHERE a.key > 80 AND a.key < 100 AND b.key > 70 AND b.key < 90 ORDER BY a.key;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME src) a) (TOK_TABREF (TOK_TABNAME srcpart) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value))) (TOK_WHERE (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) 80) (< (. (TOK_TABLE_OR_COL a) key) 100)) (> (. (TOK_TABLE_OR_COL b) key) 70)) (< (. (TOK_TABLE_OR_COL b) key) 90))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL a) key)))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-5 is a root stage'
-'  Stage-10 depends on stages: Stage-5 , consists of Stage-7, Stage-6, Stage-8'
-'  Stage-7'
-'  Stage-4 depends on stages: Stage-7, Stage-6, Stage-9'
-'  Stage-1 depends on stages: Stage-4, Stage-11'
-'  Stage-2 depends on stages: Stage-1'
-'  Stage-6'
-'  Stage-8'
-'  Stage-9 depends on stages: Stage-8'
-'  Stage-12 is a root stage'
-'  Stage-17 depends on stages: Stage-12 , consists of Stage-14, Stage-13, Stage-15'
-'  Stage-14'
-'  Stage-11 depends on stages: Stage-14, Stage-13, Stage-16'
-'  Stage-13'
-'  Stage-15'
-'  Stage-16 depends on stages: Stage-15'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-5'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        index_auto_mult_tables_compact__srcpart_srcpart_index__ '
-'          TableScan'
-'            alias: index_auto_mult_tables_compact__srcpart_srcpart_index__'
-'            filterExpr:'
-'                expr: ((((key > 70.0) and (key < 90.0)) and (key > 80.0)) and (key < 100.0))'
-'                type: boolean'
-'            Filter Operator'
-'              predicate:'
-'                  expr: ((((key > 70.0) and (key < 90.0)) and (key > 80.0)) and (key < 100.0))'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: _bucketname'
-'                      type: string'
-'                      expr: _offsets'
-'                      type: array<bigint>'
-'                outputColumnNames: _col0, _col1'
-'                File Output Operator'
-'                  compressed: false'
-'                  GlobalTableId: 1'
-'                  table:'
-'                      input format: org.apache.hadoop.mapred.TextInputFormat'
-'                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-10'
-'    Conditional Operator'
-''
-'  Stage: Stage-7'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: file:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-4'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: file:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        a '
-'          TableScan'
-'            alias: a'
-'            filterExpr:'
-'                expr: ((((key > 80.0) and (key < 100.0)) and (key > 70.0)) and (key < 90.0))'
-'                type: boolean'
-'            Filter Operator'
-'              predicate:'
-'                  expr: ((((key > 80.0) and (key < 100.0)) and (key > 70.0)) and (key < 90.0))'
-'                  type: boolean'
-'              Reduce Output Operator'
-'                key expressions:'
-'                      expr: key'
-'                      type: string'
-'                sort order: +'
-'                Map-reduce partition columns:'
-'                      expr: key'
-'                      type: string'
-'                tag: 0'
-'                value expressions:'
-'                      expr: key'
-'                      type: string'
-'                      expr: value'
-'                      type: string'
-'        b '
-'          TableScan'
-'            alias: b'
-'            filterExpr:'
-'                expr: ((((key > 70.0) and (key < 90.0)) and (key > 80.0)) and (key < 100.0))'
-'                type: boolean'
-'            Filter Operator'
-'              predicate:'
-'                  expr: ((((key > 70.0) and (key < 90.0)) and (key > 80.0)) and (key < 100.0))'
-'                  type: boolean'
-'              Reduce Output Operator'
-'                key expressions:'
-'                      expr: key'
-'                      type: string'
-'                sort order: +'
-'                Map-reduce partition columns:'
-'                      expr: key'
-'                      type: string'
-'                tag: 1'
-'                value expressions:'
-'                      expr: key'
-'                      type: string'
-'      Reduce Operator Tree:'
-'        Join Operator'
-'          condition map:'
-'               Inner Join 0 to 1'
-'          condition expressions:'
-'            0 {VALUE._col0} {VALUE._col1}'
-'            1 {VALUE._col0}'
-'          handleSkewJoin: false'
-'          outputColumnNames: _col0, _col1, _col4'
-'          Select Operator'
-'            expressions:'
-'                  expr: _col0'
-'                  type: string'
-'                  expr: _col1'
-'                  type: string'
-'            outputColumnNames: _col0, _col1'
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'
-''
-'  Stage: Stage-2'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        file:!!{hive.exec.scratchdir}!! '
-'            Reduce Output Operator'
-'              key expressions:'
-'                    expr: _col0'
-'                    type: string'
-'              sort order: +'
-'              tag: -1'
-'              value expressions:'
-'                    expr: _col0'
-'                    type: string'
-'                    expr: _col1'
-'                    type: string'
-'      Reduce Operator Tree:'
-'        Extract'
-'          File Output Operator'
-'            compressed: false'
-'            GlobalTableId: 0'
-'            table:'
-'                input format: org.apache.hadoop.mapred.TextInputFormat'
-'                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-6'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        file:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-8'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        file:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-9'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: file:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-12'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        index_auto_mult_tables_compact__src_src_index__ '
-'          TableScan'
-'            alias: index_auto_mult_tables_compact__src_src_index__'
-'            filterExpr:'
-'                expr: ((((key > 80.0) and (key < 100.0)) and (key > 70.0)) and (key < 90.0))'
-'                type: boolean'
-'            Filter Operator'
-'              predicate:'
-'                  expr: ((((key > 80.0) and (key < 100.0)) and (key > 70.0)) and (key < 90.0))'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: _bucketname'
-'                      type: string'
-'                      expr: _offsets'
-'                      type: array<bigint>'
-'                outputColumnNames: _col0, _col1'
-'                File Output Operator'
-'                  compressed: false'
-'                  GlobalTableId: 1'
-'                  table:'
-'                      input format: org.apache.hadoop.mapred.TextInputFormat'
-'                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-17'
-'    Conditional Operator'
-''
-'  Stage: Stage-14'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: file:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-11'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: file:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-13'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        file:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-15'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        file:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-16'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: file:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-264 rows selected 
->>>  SELECT a.key, a.value FROM src a JOIN srcpart b ON (a.key = b.key) WHERE a.key > 80 AND a.key < 100 AND b.key > 70 AND b.key < 90 ORDER BY a.key;
-'key','value'
-'82','val_82'
-'82','val_82'
-'82','val_82'
-'82','val_82'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'83','val_83'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'84','val_84'
-'85','val_85'
-'85','val_85'
-'85','val_85'
-'85','val_85'
-'86','val_86'
-'86','val_86'
-'86','val_86'
-'86','val_86'
-'87','val_87'
-'87','val_87'
-'87','val_87'
-'87','val_87'
-48 rows selected 
->>>  
->>>  DROP INDEX src_index on src;
-No rows affected 
->>>  DROP INDEX srcpart_index on src;
-No rows affected 
->>>  !record
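
In the compact variant just removed, each index row materialized by the DDL in
this file holds one (_bucketname, _offsets) pair per key and bucket, which is
exactly what Stage-5 and Stage-12 select. Under Hive's <db>__<table>_<index>__
naming convention for index tables, the same lookup could be issued by hand; a
sketch, assuming the default database (the table name below is illustrative):

  -- sketch: manual probe of a compact index table; name assumes database 'default'
  SELECT `_bucketname`, `_offsets`
  FROM default__src_src_index__
  WHERE key > 80 AND key < 100;

The offsets returned are what the Conditional/Move stages write to the scratch
directory so that HiveInputFormat can restrict the main join to the matching
file blocks.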

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/index_auto_multiple.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/index_auto_multiple.q.out b/ql/src/test/results/beelinepositive/index_auto_multiple.q.out
deleted file mode 100644
index 000680b..0000000
--- a/ql/src/test/results/beelinepositive/index_auto_multiple.q.out
+++ /dev/null
@@ -1,163 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/index_auto_multiple.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/index_auto_multiple.q
->>>  -- With multiple indexes, make sure we choose which to use in a consistent order
->>>  
->>>  CREATE INDEX src_key_index ON TABLE src(key) as 'COMPACT' WITH DEFERRED REBUILD;
-No rows affected 
->>>  CREATE INDEX src_val_index ON TABLE src(value) as 'COMPACT' WITH DEFERRED REBUILD;
-No rows affected 
->>>  ALTER INDEX src_key_index ON src REBUILD;
-No rows affected 
->>>  ALTER INDEX src_val_index ON src REBUILD;
-No rows affected 
->>>  
->>>  SET hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
-No rows affected 
->>>  SET hive.optimize.index.filter=true;
-No rows affected 
->>>  SET hive.optimize.index.filter.compact.minsize=0;
-No rows affected 
->>>  
->>>  EXPLAIN SELECT key, value FROM src WHERE key=86 ORDER BY key;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 86)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-3 is a root stage'
-'  Stage-8 depends on stages: Stage-3 , consists of Stage-5, Stage-4, Stage-6'
-'  Stage-5'
-'  Stage-2 depends on stages: Stage-5, Stage-4, Stage-7'
-'  Stage-1 depends on stages: Stage-2'
-'  Stage-4'
-'  Stage-6'
-'  Stage-7 depends on stages: Stage-6'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-3'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        index_auto_multiple__src_src_key_index__ '
-'          TableScan'
-'            alias: index_auto_multiple__src_src_key_index__'
-'            filterExpr:'
-'                expr: (key = 86.0)'
-'                type: boolean'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (key = 86.0)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: _bucketname'
-'                      type: string'
-'                      expr: _offsets'
-'                      type: array<bigint>'
-'                outputColumnNames: _col0, _col1'
-'                File Output Operator'
-'                  compressed: false'
-'                  GlobalTableId: 1'
-'                  table:'
-'                      input format: org.apache.hadoop.mapred.TextInputFormat'
-'                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-8'
-'    Conditional Operator'
-''
-'  Stage: Stage-5'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: file:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-2'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: file:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            filterExpr:'
-'                expr: (key = 86.0)'
-'                type: boolean'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (key = 86.0)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: key'
-'                      type: string'
-'                      expr: value'
-'                      type: string'
-'                outputColumnNames: _col0, _col1'
-'                Reduce Output Operator'
-'                  key expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                  sort order: +'
-'                  tag: -1'
-'                  value expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                        expr: _col1'
-'                        type: string'
-'      Reduce Operator Tree:'
-'        Extract'
-'          File Output Operator'
-'            compressed: false'
-'            GlobalTableId: 0'
-'            table:'
-'                input format: org.apache.hadoop.mapred.TextInputFormat'
-'                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-4'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        file:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-6'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        file:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-7'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          destination: file:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-130 rows selected 
->>>  SELECT key, value FROM src WHERE key=86 ORDER BY key;
-'key','value'
-'86','val_86'
-1 row selected 
->>>  
->>>  DROP INDEX src_key_index ON src;
-No rows affected 
->>>  DROP INDEX src_val_index ON src;
-No rows affected 
->>>  !record
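
This last file tested index choice ordering: with both src_key_index and
src_val_index rebuilt, the optimizer must pick deterministically, and the plan
above shows the key predicate rewritten against
index_auto_multiple__src_src_key_index__. By the same mechanism a filter on
value alone would be expected to route through the value index instead; a
sketch of the counterpart probe (the expected rewrite is an assumption, not
output recorded in this file):

  -- sketch: counterpart query, expected to rewrite against src_val_index
  EXPLAIN SELECT key, value FROM src WHERE value = 'val_86' ORDER BY key;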