Posted to commits@hive.apache.org by gu...@apache.org on 2017/02/03 21:50:38 UTC

[25/51] [partial] hive git commit: HIVE-15790: Remove unused beeline golden files (Gunther Hagleitner, reviewed by Sergey Shelukhin)

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/combine3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/combine3.q.out b/ql/src/test/results/beelinepositive/combine3.q.out
deleted file mode 100644
index 82d91ad..0000000
--- a/ql/src/test/results/beelinepositive/combine3.q.out
+++ /dev/null
@@ -1,148 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/combine3.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/combine3.q
->>>  set hive.exec.compress.output = true;
-No rows affected 
->>>  set hive.input.format=org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
-No rows affected 
->>>  set mapred.min.split.size=256;
-No rows affected 
->>>  set mapred.min.split.size.per.node=256;
-No rows affected 
->>>  set mapred.min.split.size.per.rack=256;
-No rows affected 
->>>  set mapred.max.split.size=256;
-No rows affected 
->>>  
->>>  
->>>  drop table combine_3_srcpart_seq_rc;
-No rows affected 
->>>  
->>>  create table combine_3_srcpart_seq_rc (key int , value string) partitioned by (ds string, hr string) stored as sequencefile;
-No rows affected 
->>>  
->>>  insert overwrite table combine_3_srcpart_seq_rc partition (ds="2010-08-03", hr="00") select * from src;
-'_col0','_col1'
-No rows selected 
->>>  
->>>  alter table combine_3_srcpart_seq_rc set fileformat rcfile;
-No rows affected 
->>>  insert overwrite table combine_3_srcpart_seq_rc partition (ds="2010-08-03", hr="001") select * from src;
-'_col0','_col1'
-No rows selected 
->>>  
->>>  desc extended combine_3_srcpart_seq_rc partition(ds="2010-08-03", hr="00");
-'col_name','data_type','comment'
-'key','int',''
-'value','string',''
-'ds','string',''
-'hr','string',''
-'','',''
-'Detailed Partition Information','Partition(values:[2010-08-03, 00], dbName:combine3, tableName:combine_3_srcpart_seq_rc, createTime:!!UNIXTIME!!, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null), FieldSchema(name:value, type:string, comment:null), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:!!{hive.metastore.warehouse.dir}!!/combine3.db/combine_3_srcpart_seq_rc/ds=2010-08-03/hr=00, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{})), parameters:{numFiles=1, transient_lastDdlTime=!!UNIXTIME!!, numRows=500, totalSize=15250, rawDataSize=5312})',''
-6 rows selected 
->>>  desc extended combine_3_srcpart_seq_rc partition(ds="2010-08-03", hr="001");
-'col_name','data_type','comment'
-'key','int',''
-'value','string',''
-'ds','string',''
-'hr','string',''
-'','',''
-'Detailed Partition Information','Partition(values:[2010-08-03, 001], dbName:combine3, tableName:combine_3_srcpart_seq_rc, createTime:!!UNIXTIME!!, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null), FieldSchema(name:value, type:string, comment:null), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:!!{hive.metastore.warehouse.dir}!!/combine3.db/combine_3_srcpart_seq_rc/ds=2010-08-03/hr=001, inputFormat:org.apache.hadoop.hive.ql.io.RCFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{})), parameters:{numFiles=1, transient_lastDdlTime=!!UNIXTIME!!, numRows=500, totalSize=1981, rawDataSize=4812})',''
-6 rows selected 
->>>  
->>>  select key, value, ds, hr from combine_3_srcpart_seq_rc where ds="2010-08-03" order by key, hr limit 30;
-'key','value','ds','hr'
-'0','val_0','2010-08-03','00'
-'0','val_0','2010-08-03','00'
-'0','val_0','2010-08-03','00'
-'0','val_0','2010-08-03','001'
-'0','val_0','2010-08-03','001'
-'0','val_0','2010-08-03','001'
-'2','val_2','2010-08-03','00'
-'2','val_2','2010-08-03','001'
-'4','val_4','2010-08-03','00'
-'4','val_4','2010-08-03','001'
-'5','val_5','2010-08-03','00'
-'5','val_5','2010-08-03','00'
-'5','val_5','2010-08-03','00'
-'5','val_5','2010-08-03','001'
-'5','val_5','2010-08-03','001'
-'5','val_5','2010-08-03','001'
-'8','val_8','2010-08-03','00'
-'8','val_8','2010-08-03','001'
-'9','val_9','2010-08-03','00'
-'9','val_9','2010-08-03','001'
-'10','val_10','2010-08-03','00'
-'10','val_10','2010-08-03','001'
-'11','val_11','2010-08-03','00'
-'11','val_11','2010-08-03','001'
-'12','val_12','2010-08-03','00'
-'12','val_12','2010-08-03','00'
-'12','val_12','2010-08-03','001'
-'12','val_12','2010-08-03','001'
-'15','val_15','2010-08-03','00'
-'15','val_15','2010-08-03','00'
-30 rows selected 
->>>  
->>>  set hive.enforce.bucketing = true;
-No rows affected 
->>>  set hive.exec.reducers.max = 1;
-No rows affected 
->>>  
->>>  drop table bucket3_1;
-No rows affected 
->>>  CREATE TABLE combine_3_srcpart_seq_rc_bucket(key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS stored as sequencefile;
-No rows affected 
->>>  
->>>  insert overwrite table combine_3_srcpart_seq_rc_bucket partition (ds='1') 
-select * from src;
-'_col0','_col1'
-No rows selected 
->>>  
->>>  alter table combine_3_srcpart_seq_rc_bucket set fileformat rcfile;
-No rows affected 
->>>  
->>>  insert overwrite table combine_3_srcpart_seq_rc_bucket partition (ds='11') 
-select * from src;
-'_col0','_col1'
-No rows selected 
->>>  
->>>  select key, ds from combine_3_srcpart_seq_rc_bucket tablesample (bucket 1 out of 2) s where ds = '1' or ds= '11' order by key, ds limit 30;
-'key','ds'
-'0','1'
-'0','1'
-'0','1'
-'0','11'
-'0','11'
-'0','11'
-'2','1'
-'2','11'
-'4','1'
-'4','11'
-'8','1'
-'8','11'
-'10','1'
-'10','11'
-'12','1'
-'12','1'
-'12','11'
-'12','11'
-'18','1'
-'18','1'
-'18','11'
-'18','11'
-'20','1'
-'20','11'
-'24','1'
-'24','1'
-'24','11'
-'24','11'
-'26','1'
-'26','1'
-30 rows selected 
->>>  
->>>  drop table combine_3_srcpart_seq_rc_bucket;
-No rows affected 
->>>  
->>>  drop table combine_3_srcpart_seq_rc;
-No rows affected 
->>>  !record
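For reference, combine3.q exercised CombineHiveInputFormat over a partitioned table whose partitions mix SequenceFile and RCFile, plus sampling of a bucketed table. A minimal sketch of the coverage being removed, assembled from the statements in the deleted output (split-size settings abridged):

  set hive.input.format=org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
  -- partitions of one table written with different file formats
  create table combine_3_srcpart_seq_rc (key int, value string)
      partitioned by (ds string, hr string) stored as sequencefile;
  insert overwrite table combine_3_srcpart_seq_rc partition (ds="2010-08-03", hr="00") select * from src;
  alter table combine_3_srcpart_seq_rc set fileformat rcfile;
  insert overwrite table combine_3_srcpart_seq_rc partition (ds="2010-08-03", hr="001") select * from src;
  -- reads both formats through the combined input format
  select key, value, ds, hr from combine_3_srcpart_seq_rc where ds="2010-08-03" order by key, hr limit 30;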

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/concatenate_inherit_table_location.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/concatenate_inherit_table_location.q.out b/ql/src/test/results/beelinepositive/concatenate_inherit_table_location.q.out
deleted file mode 100644
index ba2201d..0000000
--- a/ql/src/test/results/beelinepositive/concatenate_inherit_table_location.q.out
+++ /dev/null
@@ -1,37 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/concatenate_inherit_table_location.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/concatenate_inherit_table_location.q
->>>  CREATE TABLE citl_table (key STRING, value STRING) PARTITIONED BY (part STRING) 
-STORED AS RCFILE 
-LOCATION 'pfile:${system:test.tmp.dir}/citl_table';
-No rows affected 
->>>  
->>>  SET hive.exec.post.hooks=org.apache.hadoop.hive.ql.hooks.VerifyPartitionIsSubdirectoryOfTableHook;
-No rows affected 
->>>  
->>>  INSERT OVERWRITE TABLE citl_table PARTITION (part = '1') SELECT * FROM src;
-'key','value'
-No rows selected 
->>>  
->>>  SET hive.exec.post.hooks=;
-No rows affected 
->>>  
->>>  ALTER TABLE citl_table SET LOCATION 'file:${system:test.tmp.dir}/citl_table';
-No rows affected 
->>>  
->>>  ALTER TABLE citl_table PARTITION (part = '1') CONCATENATE;
-No rows affected 
->>>  
->>>  SET hive.exec.post.hooks=org.apache.hadoop.hive.ql.hooks.VerifyPartitionIsSubdirectoryOfTableHook;
-No rows affected 
->>>  
->>>  SELECT count(*) FROM citl_table where part = '1';
-'_c0'
-'500'
-1 row selected 
->>>  
->>>  SET hive.exec.post.hooks=;
-No rows affected 
->>>  
->>>  DROP TABLE citl_table;
-No rows affected 
->>>  !record
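For reference, concatenate_inherit_table_location.q verified that ALTER TABLE ... CONCATENATE writes the merged files under the table's current location, checked via the VerifyPartitionIsSubdirectoryOfTableHook post-hook. A minimal sketch of the pattern, taken from the statements in the deleted output:

  CREATE TABLE citl_table (key STRING, value STRING) PARTITIONED BY (part STRING) STORED AS RCFILE;
  INSERT OVERWRITE TABLE citl_table PARTITION (part = '1') SELECT * FROM src;
  ALTER TABLE citl_table SET LOCATION 'file:${system:test.tmp.dir}/citl_table';
  -- merges the partition's small RCFiles in place, under the new table location
  ALTER TABLE citl_table PARTITION (part = '1') CONCATENATE;
  SELECT count(*) FROM citl_table WHERE part = '1';  -- still 500 rows after the merge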

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/convert_enum_to_string.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/convert_enum_to_string.q.out b/ql/src/test/results/beelinepositive/convert_enum_to_string.q.out
deleted file mode 100644
index b576c73..0000000
--- a/ql/src/test/results/beelinepositive/convert_enum_to_string.q.out
+++ /dev/null
@@ -1,37 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/convert_enum_to_string.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/convert_enum_to_string.q
->>>  -- Ensure Enum fields are converted to strings (instead of struct<value:int>)
->>>  
->>>  create table convert_enum_to_string 
-partitioned by (b string) 
-row format serde "org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer" 
-with serdeproperties ( 
-"serialization.class"="org.apache.hadoop.hive.serde2.thrift.test.MegaStruct", 
-"serialization.format"="org.apache.thrift.protocol.TBinaryProtocol");
-No rows affected 
->>>  
->>>  describe convert_enum_to_string;
-'col_name','data_type','comment'
-'my_bool','boolean','from deserializer'
-'my_byte','tinyint','from deserializer'
-'my_16bit_int','smallint','from deserializer'
-'my_32bit_int','int','from deserializer'
-'my_64bit_int','bigint','from deserializer'
-'my_double','double','from deserializer'
-'my_string','string','from deserializer'
-'my_binary','struct<hb:binary,offset:int,isreadonly:boolean,bigendian:boolean,nativebyteorder:boolean>','from deserializer'
-'my_string_string_map','map<string,string>','from deserializer'
-'my_string_enum_map','map<string,string>','from deserializer'
-'my_enum_string_map','map<string,string>','from deserializer'
-'my_enum_struct_map','map<string,struct<my_string:string,my_enum:string>>','from deserializer'
-'my_enum_stringlist_map','map<string,array<string>>','from deserializer'
-'my_enum_structlist_map','map<string,array<struct<my_string:string,my_enum:string>>>','from deserializer'
-'my_stringlist','array<string>','from deserializer'
-'my_structlist','array<struct<my_string:string,my_enum:string>>','from deserializer'
-'my_enumlist','array<string>','from deserializer'
-'my_stringset','array<string>','from deserializer'
-'my_enumset','array<string>','from deserializer'
-'my_structset','array<struct<my_string:string,my_enum:string>>','from deserializer'
-'b','string',''
-21 rows selected 
->>>  !record
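For reference, convert_enum_to_string.q checked that Thrift enum fields are exposed as string columns (rather than struct<value:int>) when a table is backed by the ThriftDeserializer. A minimal sketch of the DDL it covered, taken from the deleted output:

  create table convert_enum_to_string
  partitioned by (b string)
  row format serde "org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer"
  with serdeproperties (
    "serialization.class"="org.apache.hadoop.hive.serde2.thrift.test.MegaStruct",
    "serialization.format"="org.apache.thrift.protocol.TBinaryProtocol");
  -- enum-typed fields surface as string: my_enumlist as array<string>, my_string_enum_map as map<string,string>
  describe convert_enum_to_string;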

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/count.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/count.q.out b/ql/src/test/results/beelinepositive/count.q.out
deleted file mode 100644
index 3f29d85..0000000
--- a/ql/src/test/results/beelinepositive/count.q.out
+++ /dev/null
@@ -1,553 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/count.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/count.q
->>>  create table abcd (a int, b int, c int, d int);
-No rows affected 
->>>  LOAD DATA LOCAL INPATH '../data/files/in4.txt' INTO TABLE abcd;
-No rows affected 
->>>  
->>>  select * from abcd;
-'a','b','c','d'
-'','35','23','6'
-'10','1000','50','1'
-'100','100','10','3'
-'12','','80','2'
-'10','100','','5'
-'10','100','45','4'
-'12','100','75','7'
-7 rows selected 
->>>  set hive.map.aggr=true;
-No rows affected 
->>>  explain select a, count(distinct b), count(distinct c), sum(d) from abcd group by a;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME abcd))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL a)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL b))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_TABLE_OR_COL d)))) (TOK_GROUPBY (TOK_TABLE_OR_COL a))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        abcd '
-'          TableScan'
-'            alias: abcd'
-'            Select Operator'
-'              expressions:'
-'                    expr: a'
-'                    type: int'
-'                    expr: b'
-'                    type: int'
-'                    expr: c'
-'                    type: int'
-'                    expr: d'
-'                    type: int'
-'              outputColumnNames: a, b, c, d'
-'              Group By Operator'
-'                aggregations:'
-'                      expr: count(DISTINCT b)'
-'                      expr: count(DISTINCT c)'
-'                      expr: sum(d)'
-'                bucketGroup: false'
-'                keys:'
-'                      expr: a'
-'                      type: int'
-'                      expr: b'
-'                      type: int'
-'                      expr: c'
-'                      type: int'
-'                mode: hash'
-'                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5'
-'                Reduce Output Operator'
-'                  key expressions:'
-'                        expr: _col0'
-'                        type: int'
-'                        expr: _col1'
-'                        type: int'
-'                        expr: _col2'
-'                        type: int'
-'                  sort order: +++'
-'                  Map-reduce partition columns:'
-'                        expr: _col0'
-'                        type: int'
-'                  tag: -1'
-'                  value expressions:'
-'                        expr: _col3'
-'                        type: bigint'
-'                        expr: _col4'
-'                        type: bigint'
-'                        expr: _col5'
-'                        type: bigint'
-'      Reduce Operator Tree:'
-'        Group By Operator'
-'          aggregations:'
-'                expr: count(DISTINCT KEY._col1:0._col0)'
-'                expr: count(DISTINCT KEY._col1:1._col0)'
-'                expr: sum(VALUE._col2)'
-'          bucketGroup: false'
-'          keys:'
-'                expr: KEY._col0'
-'                type: int'
-'          mode: mergepartial'
-'          outputColumnNames: _col0, _col1, _col2, _col3'
-'          Select Operator'
-'            expressions:'
-'                  expr: _col0'
-'                  type: int'
-'                  expr: _col1'
-'                  type: bigint'
-'                  expr: _col2'
-'                  type: bigint'
-'                  expr: _col3'
-'                  type: bigint'
-'            outputColumnNames: _col0, _col1, _col2, _col3'
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-95 rows selected 
->>>  select a, count(distinct b), count(distinct c), sum(d) from abcd group by a;
-'a','_c1','_c2','_c3'
-'','1','1','6'
-'10','2','2','10'
-'12','1','2','9'
-'100','1','1','3'
-4 rows selected 
->>>  
->>>  explain select count(1), count(*), count(a), count(b), count(c), count(d), count(distinct a), count(distinct b), count(distinct c), count(distinct d), count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), count(distinct a,b,c,d) from abcd;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME abcd))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)) (TOK_SELEXPR (TOK_FUNCTIONSTAR count)) (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL a))) (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL b))) (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL b))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL b))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL c) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL c) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL c) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL c) (TOK_TABLE_OR_COL d))))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        abcd '
-'          TableScan'
-'            alias: abcd'
-'            Select Operator'
-'              expressions:'
-'                    expr: a'
-'                    type: int'
-'                    expr: b'
-'                    type: int'
-'                    expr: c'
-'                    type: int'
-'                    expr: d'
-'                    type: int'
-'              outputColumnNames: a, b, c, d'
-'              Group By Operator'
-'                aggregations:'
-'                      expr: count(1)'
-'                      expr: count()'
-'                      expr: count(a)'
-'                      expr: count(b)'
-'                      expr: count(c)'
-'                      expr: count(d)'
-'                      expr: count(DISTINCT a)'
-'                      expr: count(DISTINCT b)'
-'                      expr: count(DISTINCT c)'
-'                      expr: count(DISTINCT d)'
-'                      expr: count(DISTINCT a, b)'
-'                      expr: count(DISTINCT b, c)'
-'                      expr: count(DISTINCT c, d)'
-'                      expr: count(DISTINCT a, d)'
-'                      expr: count(DISTINCT a, c)'
-'                      expr: count(DISTINCT b, d)'
-'                      expr: count(DISTINCT a, b, c)'
-'                      expr: count(DISTINCT b, c, d)'
-'                      expr: count(DISTINCT a, c, d)'
-'                      expr: count(DISTINCT a, b, d)'
-'                      expr: count(DISTINCT a, b, c, d)'
-'                bucketGroup: false'
-'                keys:'
-'                      expr: a'
-'                      type: int'
-'                      expr: b'
-'                      type: int'
-'                      expr: c'
-'                      type: int'
-'                      expr: d'
-'                      type: int'
-'                mode: hash'
-'                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24'
-'                Reduce Output Operator'
-'                  key expressions:'
-'                        expr: _col0'
-'                        type: int'
-'                        expr: _col1'
-'                        type: int'
-'                        expr: _col2'
-'                        type: int'
-'                        expr: _col3'
-'                        type: int'
-'                  sort order: ++++'
-'                  tag: -1'
-'                  value expressions:'
-'                        expr: _col4'
-'                        type: bigint'
-'                        expr: _col5'
-'                        type: bigint'
-'                        expr: _col6'
-'                        type: bigint'
-'                        expr: _col7'
-'                        type: bigint'
-'                        expr: _col8'
-'                        type: bigint'
-'                        expr: _col9'
-'                        type: bigint'
-'                        expr: _col10'
-'                        type: bigint'
-'                        expr: _col11'
-'                        type: bigint'
-'                        expr: _col12'
-'                        type: bigint'
-'                        expr: _col13'
-'                        type: bigint'
-'                        expr: _col14'
-'                        type: bigint'
-'                        expr: _col15'
-'                        type: bigint'
-'                        expr: _col16'
-'                        type: bigint'
-'                        expr: _col17'
-'                        type: bigint'
-'                        expr: _col18'
-'                        type: bigint'
-'                        expr: _col19'
-'                        type: bigint'
-'                        expr: _col20'
-'                        type: bigint'
-'                        expr: _col21'
-'                        type: bigint'
-'                        expr: _col22'
-'                        type: bigint'
-'                        expr: _col23'
-'                        type: bigint'
-'                        expr: _col24'
-'                        type: bigint'
-'      Reduce Operator Tree:'
-'        Group By Operator'
-'          aggregations:'
-'                expr: count(VALUE._col0)'
-'                expr: count(VALUE._col1)'
-'                expr: count(VALUE._col2)'
-'                expr: count(VALUE._col3)'
-'                expr: count(VALUE._col4)'
-'                expr: count(VALUE._col5)'
-'                expr: count(DISTINCT KEY._col0:0._col0)'
-'                expr: count(DISTINCT KEY._col0:1._col0)'
-'                expr: count(DISTINCT KEY._col0:2._col0)'
-'                expr: count(DISTINCT KEY._col0:3._col0)'
-'                expr: count(DISTINCT KEY._col0:4._col0, KEY._col0:4._col1)'
-'                expr: count(DISTINCT KEY._col0:5._col0, KEY._col0:5._col1)'
-'                expr: count(DISTINCT KEY._col0:6._col0, KEY._col0:6._col1)'
-'                expr: count(DISTINCT KEY._col0:7._col0, KEY._col0:7._col1)'
-'                expr: count(DISTINCT KEY._col0:8._col0, KEY._col0:8._col1)'
-'                expr: count(DISTINCT KEY._col0:9._col0, KEY._col0:9._col1)'
-'                expr: count(DISTINCT KEY._col0:10._col0, KEY._col0:10._col1, KEY._col0:10._col2)'
-'                expr: count(DISTINCT KEY._col0:11._col0, KEY._col0:11._col1, KEY._col0:11._col2)'
-'                expr: count(DISTINCT KEY._col0:12._col0, KEY._col0:12._col1, KEY._col0:12._col2)'
-'                expr: count(DISTINCT KEY._col0:13._col0, KEY._col0:13._col1, KEY._col0:13._col2)'
-'                expr: count(DISTINCT KEY._col0:14._col0, KEY._col0:14._col1, KEY._col0:14._col2, KEY._col0:14._col3)'
-'          bucketGroup: false'
-'          mode: mergepartial'
-'          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20'
-'          Select Operator'
-'            expressions:'
-'                  expr: _col0'
-'                  type: bigint'
-'                  expr: _col1'
-'                  type: bigint'
-'                  expr: _col2'
-'                  type: bigint'
-'                  expr: _col3'
-'                  type: bigint'
-'                  expr: _col4'
-'                  type: bigint'
-'                  expr: _col5'
-'                  type: bigint'
-'                  expr: _col6'
-'                  type: bigint'
-'                  expr: _col7'
-'                  type: bigint'
-'                  expr: _col8'
-'                  type: bigint'
-'                  expr: _col9'
-'                  type: bigint'
-'                  expr: _col10'
-'                  type: bigint'
-'                  expr: _col11'
-'                  type: bigint'
-'                  expr: _col12'
-'                  type: bigint'
-'                  expr: _col13'
-'                  type: bigint'
-'                  expr: _col14'
-'                  type: bigint'
-'                  expr: _col15'
-'                  type: bigint'
-'                  expr: _col16'
-'                  type: bigint'
-'                  expr: _col17'
-'                  type: bigint'
-'                  expr: _col18'
-'                  type: bigint'
-'                  expr: _col19'
-'                  type: bigint'
-'                  expr: _col20'
-'                  type: bigint'
-'            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20'
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-199 rows selected 
->>>  select count(1), count(*), count(a), count(b), count(c), count(d), count(distinct a), count(distinct b), count(distinct c), count(distinct d), count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), count(distinct a,b,c,d) from abcd;
-'_c0','_c1','_c2','_c3','_c4','_c5','_c6','_c7','_c8','_c9','_c10','_c11','_c12','_c13','_c14','_c15','_c16','_c17','_c18','_c19','_c20'
-'7','7','6','6','6','7','3','3','6','7','4','5','6','6','5','6','4','5','5','5','4'
-1 row selected 
->>>  
->>>  set hive.map.aggr=false;
-No rows affected 
->>>  explain select a, count(distinct b), count(distinct c), sum(d) from abcd group by a;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME abcd))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL a)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL b))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_TABLE_OR_COL d)))) (TOK_GROUPBY (TOK_TABLE_OR_COL a))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        abcd '
-'          TableScan'
-'            alias: abcd'
-'            Select Operator'
-'              expressions:'
-'                    expr: a'
-'                    type: int'
-'                    expr: b'
-'                    type: int'
-'                    expr: c'
-'                    type: int'
-'                    expr: d'
-'                    type: int'
-'              outputColumnNames: a, b, c, d'
-'              Reduce Output Operator'
-'                key expressions:'
-'                      expr: a'
-'                      type: int'
-'                      expr: b'
-'                      type: int'
-'                      expr: c'
-'                      type: int'
-'                sort order: +++'
-'                Map-reduce partition columns:'
-'                      expr: a'
-'                      type: int'
-'                tag: -1'
-'                value expressions:'
-'                      expr: d'
-'                      type: int'
-'      Reduce Operator Tree:'
-'        Group By Operator'
-'          aggregations:'
-'                expr: count(DISTINCT KEY._col1:0._col0)'
-'                expr: count(DISTINCT KEY._col1:1._col0)'
-'                expr: sum(VALUE._col0)'
-'          bucketGroup: false'
-'          keys:'
-'                expr: KEY._col0'
-'                type: int'
-'          mode: complete'
-'          outputColumnNames: _col0, _col1, _col2, _col3'
-'          Select Operator'
-'            expressions:'
-'                  expr: _col0'
-'                  type: int'
-'                  expr: _col1'
-'                  type: bigint'
-'                  expr: _col2'
-'                  type: bigint'
-'                  expr: _col3'
-'                  type: bigint'
-'            outputColumnNames: _col0, _col1, _col2, _col3'
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-76 rows selected 
->>>  select a, count(distinct b), count(distinct c), sum(d) from abcd group by a;
-'a','_c1','_c2','_c3'
-'','1','1','6'
-'10','2','2','10'
-'12','1','2','9'
-'100','1','1','3'
-4 rows selected 
->>>  
->>>  explain select count(1), count(*), count(a), count(b), count(c), count(d), count(distinct a), count(distinct b), count(distinct c), count(distinct d), count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), count(distinct a,b,c,d) from abcd;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME abcd))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)) (TOK_SELEXPR (TOK_FUNCTIONSTAR count)) (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL a))) (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL b))) (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL b))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL b))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL c) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL c) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL c) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL c) (TOK_TABLE_OR_COL d))))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        abcd '
-'          TableScan'
-'            alias: abcd'
-'            Select Operator'
-'              expressions:'
-'                    expr: a'
-'                    type: int'
-'                    expr: b'
-'                    type: int'
-'                    expr: c'
-'                    type: int'
-'                    expr: d'
-'                    type: int'
-'              outputColumnNames: a, b, c, d'
-'              Reduce Output Operator'
-'                key expressions:'
-'                      expr: a'
-'                      type: int'
-'                      expr: b'
-'                      type: int'
-'                      expr: c'
-'                      type: int'
-'                      expr: d'
-'                      type: int'
-'                sort order: ++++'
-'                tag: -1'
-'                value expressions:'
-'                      expr: 1'
-'                      type: int'
-'      Reduce Operator Tree:'
-'        Group By Operator'
-'          aggregations:'
-'                expr: count(VALUE._col0)'
-'                expr: count()'
-'                expr: count(KEY._col0:14._col0)'
-'                expr: count(KEY._col0:14._col1)'
-'                expr: count(KEY._col0:14._col2)'
-'                expr: count(KEY._col0:14._col3)'
-'                expr: count(DISTINCT KEY._col0:0._col0)'
-'                expr: count(DISTINCT KEY._col0:1._col0)'
-'                expr: count(DISTINCT KEY._col0:2._col0)'
-'                expr: count(DISTINCT KEY._col0:3._col0)'
-'                expr: count(DISTINCT KEY._col0:4._col0, KEY._col0:4._col1)'
-'                expr: count(DISTINCT KEY._col0:5._col0, KEY._col0:5._col1)'
-'                expr: count(DISTINCT KEY._col0:6._col0, KEY._col0:6._col1)'
-'                expr: count(DISTINCT KEY._col0:7._col0, KEY._col0:7._col1)'
-'                expr: count(DISTINCT KEY._col0:8._col0, KEY._col0:8._col1)'
-'                expr: count(DISTINCT KEY._col0:9._col0, KEY._col0:9._col1)'
-'                expr: count(DISTINCT KEY._col0:10._col0, KEY._col0:10._col1, KEY._col0:10._col2)'
-'                expr: count(DISTINCT KEY._col0:11._col0, KEY._col0:11._col1, KEY._col0:11._col2)'
-'                expr: count(DISTINCT KEY._col0:12._col0, KEY._col0:12._col1, KEY._col0:12._col2)'
-'                expr: count(DISTINCT KEY._col0:13._col0, KEY._col0:13._col1, KEY._col0:13._col2)'
-'                expr: count(DISTINCT KEY._col0:14._col0, KEY._col0:14._col1, KEY._col0:14._col2, KEY._col0:14._col3)'
-'          bucketGroup: false'
-'          mode: complete'
-'          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20'
-'          Select Operator'
-'            expressions:'
-'                  expr: _col0'
-'                  type: bigint'
-'                  expr: _col1'
-'                  type: bigint'
-'                  expr: _col2'
-'                  type: bigint'
-'                  expr: _col3'
-'                  type: bigint'
-'                  expr: _col4'
-'                  type: bigint'
-'                  expr: _col5'
-'                  type: bigint'
-'                  expr: _col6'
-'                  type: bigint'
-'                  expr: _col7'
-'                  type: bigint'
-'                  expr: _col8'
-'                  type: bigint'
-'                  expr: _col9'
-'                  type: bigint'
-'                  expr: _col10'
-'                  type: bigint'
-'                  expr: _col11'
-'                  type: bigint'
-'                  expr: _col12'
-'                  type: bigint'
-'                  expr: _col13'
-'                  type: bigint'
-'                  expr: _col14'
-'                  type: bigint'
-'                  expr: _col15'
-'                  type: bigint'
-'                  expr: _col16'
-'                  type: bigint'
-'                  expr: _col17'
-'                  type: bigint'
-'                  expr: _col18'
-'                  type: bigint'
-'                  expr: _col19'
-'                  type: bigint'
-'                  expr: _col20'
-'                  type: bigint'
-'            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20'
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-124 rows selected 
->>>  select count(1), count(*), count(a), count(b), count(c), count(d), count(distinct a), count(distinct b), count(distinct c), count(distinct d), count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), count(distinct a,b,c,d) from abcd;
-'_c0','_c1','_c2','_c3','_c4','_c5','_c6','_c7','_c8','_c9','_c10','_c11','_c12','_c13','_c14','_c15','_c16','_c17','_c18','_c19','_c20'
-'7','7','6','6','6','7','3','3','6','7','4','5','6','6','5','6','4','5','5','5','4'
-1 row selected 
->>>  !record
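For reference, count.q compared plans and results for plain and DISTINCT counts with map-side aggregation on and off; both modes return the same rows. A minimal sketch of the queries it covered (the 21-aggregate query is abridged here):

  set hive.map.aggr=true;   -- the test repeats the same queries with hive.map.aggr=false
  explain select a, count(distinct b), count(distinct c), sum(d) from abcd group by a;
  select a, count(distinct b), count(distinct c), sum(d) from abcd group by a;
  select count(1), count(*), count(a), count(distinct a), count(distinct a,b), count(distinct a,b,c,d) from abcd;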

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/cp_mj_rc.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/cp_mj_rc.q.out b/ql/src/test/results/beelinepositive/cp_mj_rc.q.out
deleted file mode 100644
index 02038f8..0000000
--- a/ql/src/test/results/beelinepositive/cp_mj_rc.q.out
+++ /dev/null
@@ -1,20 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/cp_mj_rc.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/cp_mj_rc.q
->>>  create table src_six_columns (k1 string, v1 string, k2 string, v2 string, k3 string, v3 string) stored as rcfile;
-No rows affected 
->>>  insert overwrite table src_six_columns select value, value, key, value, value, value from src;
-'value','value','key','value','value','value'
-No rows selected 
->>>  create table src_two_columns (k1 string, v1 string) stored as rcfile;
-No rows affected 
->>>  insert overwrite table src_two_columns select key, value from src;
-'key','value'
-No rows selected 
->>>  SELECT /*+ MAPJOIN(six) */ six.*, two.k1 from src_six_columns six join src_two_columns two on (six.k3=two.k1);
-'k1','v1','k2','v2','k3','v3','k1'
-No rows selected 
->>>  
->>>  SELECT /*+ MAPJOIN(two) */ two.*, six.k3 from src_six_columns six join src_two_columns two on (six.k3=two.k1);
-'k1','v1','k3'
-No rows selected 
->>>  !record
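For reference, cp_mj_rc.q exercised column pruning for map joins over RCFile tables, driven by the MAPJOIN hint. A minimal sketch, taken from the statements in the deleted output:

  create table src_six_columns (k1 string, v1 string, k2 string, v2 string, k3 string, v3 string) stored as rcfile;
  create table src_two_columns (k1 string, v1 string) stored as rcfile;
  -- the hinted table is loaded into memory; only the referenced RCFile columns need to be read
  SELECT /*+ MAPJOIN(two) */ two.*, six.k3
  FROM src_six_columns six JOIN src_two_columns two ON (six.k3 = two.k1);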

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/create_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/create_1.q.out b/ql/src/test/results/beelinepositive/create_1.q.out
deleted file mode 100644
index 9e35cbf..0000000
--- a/ql/src/test/results/beelinepositive/create_1.q.out
+++ /dev/null
@@ -1,89 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/create_1.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/create_1.q
->>>  set fs.default.name=invalidscheme:///;
-No rows affected 
->>>  
->>>  CREATE TABLE table1 (a STRING, b STRING) STORED AS TEXTFILE;
-No rows affected 
->>>  DESCRIBE table1;
-'col_name','data_type','comment'
-'a','string',''
-'b','string',''
-2 rows selected 
->>>  DESCRIBE EXTENDED table1;
-'col_name','data_type','comment'
-'a','string',''
-'b','string',''
-'','',''
-'Detailed Table Information','Table(tableName:table1, dbName:create_1, owner:!!{user.name}!!, createTime:!!UNIXTIME!!, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:!!{hive.metastore.warehouse.dir}!!/create_1.db/table1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{})), partitionKeys:[], parameters:{transient_lastDdlTime=!!UNIXTIME!!}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)',''
-4 rows selected 
->>>  
->>>  CREATE TABLE IF NOT EXISTS table1 (a STRING, b STRING) STORED AS TEXTFILE;
-No rows affected 
->>>  
->>>  CREATE TABLE IF NOT EXISTS table2 (a STRING, b INT) STORED AS TEXTFILE;
-No rows affected 
->>>  DESCRIBE table2;
-'col_name','data_type','comment'
-'a','string',''
-'b','int',''
-2 rows selected 
->>>  DESCRIBE EXTENDED table2;
-'col_name','data_type','comment'
-'a','string',''
-'b','int',''
-'','',''
-'Detailed Table Information','Table(tableName:table2, dbName:create_1, owner:!!{user.name}!!, createTime:!!UNIXTIME!!, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:int, comment:null)], location:!!{hive.metastore.warehouse.dir}!!/create_1.db/table2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{})), partitionKeys:[], parameters:{transient_lastDdlTime=!!UNIXTIME!!}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)',''
-4 rows selected 
->>>  
->>>  CREATE TABLE table3 (a STRING, b STRING) 
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' 
-STORED AS TEXTFILE;
-No rows affected 
->>>  DESCRIBE table3;
-'col_name','data_type','comment'
-'a','string',''
-'b','string',''
-2 rows selected 
->>>  DESCRIBE EXTENDED table3;
-'col_name','data_type','comment'
-'a','string',''
-'b','string',''
-'','',''
-'Detailed Table Information','Table(tableName:table3, dbName:create_1, owner:!!{user.name}!!, createTime:!!UNIXTIME!!, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:!!{hive.metastore.warehouse.dir}!!/create_1.db/table3, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=',', field.delim='
-4 rows selected 
->>>  
->>>  CREATE TABLE table4 (a STRING, b STRING) 
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' 
-STORED AS SEQUENCEFILE;
-No rows affected 
->>>  DESCRIBE table4;
-'col_name','data_type','comment'
-'a','string',''
-'b','string',''
-2 rows selected 
->>>  DESCRIBE EXTENDED table4;
-'col_name','data_type','comment'
-'a','string',''
-'b','string',''
-'','',''
-'Detailed Table Information','Table(tableName:table4, dbName:create_1, owner:!!{user.name}!!, createTime:!!UNIXTIME!!, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:!!{hive.metastore.warehouse.dir}!!/create_1.db/table4, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=',', field.delim='
-4 rows selected 
->>>  
->>>  CREATE TABLE table5 (a STRING, b STRING) 
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' 
-STORED AS RCFILE;
-No rows affected 
->>>  DESCRIBE table5;
-'col_name','data_type','comment'
-'a','string',''
-'b','string',''
-2 rows selected 
->>>  DESCRIBE EXTENDED table5;
-'col_name','data_type','comment'
-'a','string',''
-'b','string',''
-'','',''
-'Detailed Table Information','Table(tableName:table5, dbName:create_1, owner:!!{user.name}!!, createTime:!!UNIXTIME!!, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:!!{hive.metastore.warehouse.dir}!!/create_1.db/table5, inputFormat:org.apache.hadoop.hive.ql.io.RCFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, parameters:{serialization.format=',', field.delim='
-4 rows selected 
->>>  !record
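For reference, create_1.q covered the basic CREATE TABLE variants (TEXTFILE, SEQUENCEFILE, RCFILE, delimited row format, IF NOT EXISTS) and their DESCRIBE EXTENDED metadata. A minimal sketch, taken from the statements in the deleted output:

  CREATE TABLE table1 (a STRING, b STRING) STORED AS TEXTFILE;
  CREATE TABLE IF NOT EXISTS table1 (a STRING, b STRING) STORED AS TEXTFILE;  -- no-op, table already exists
  CREATE TABLE table4 (a STRING, b STRING)
  ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
  STORED AS SEQUENCEFILE;
  DESCRIBE EXTENDED table4;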

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/create_big_view.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/create_big_view.q.out b/ql/src/test/results/beelinepositive/create_big_view.q.out
deleted file mode 100644
index 7c1c3c8..0000000
--- a/ql/src/test/results/beelinepositive/create_big_view.q.out
+++ /dev/null
@@ -1,256 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/create_big_view.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/create_big_view.q
->>>  DROP VIEW big_view;
-No rows affected 
->>>  
->>>  -- Define a view with long SQL text to test metastore and other limits.
->>>  
->>>  CREATE VIEW big_view AS SELECT 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' AS a, 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' 
-FROM src;
-'a','_c1','_c2','_c3','_c4','_c5','_c6','_c7','_c8','_c9','_c10','_c11','_c12','_c13','_c14','_c15','_c16','_c17','_c18','_c19','_c20','_c21','_c22','_c23','_c24','_c25','_c26','_c27','_c28','_c29','_c30','_c31','_c32','_c33','_c34','_c35','_c36','_c37','_c38','_c39','_c40','_c41','_c42','_c43','_c44','_c45','_c46','_c47','_c48','_c49','_c50','_c51','_c52','_c53','_c54','_c55','_c56','_c57','_c58','_c59','_c60','_c61','_c62','_c63','_c64','_c65','_c66','_c67','_c68','_c69','_c70','_c71','_c72','_c73','_c74','_c75','_c76','_c77','_c78','_c79','_c80','_c81','_c82','_c83','_c84','_c85','_c86','_c87','_c88','_c89','_c90','_c91','_c92','_c93','_c94','_c95','_c96','_c97','_c98','_c99','_c100','_c101','_c102','_c103','_c104','_c105','_c106','_c107','_c108','_c109','_c110','_c111','_c112','_c113','_c114','_c115','_c116','_c117','_c118','_c119','_c120','_c121','_c122','_c123','_c124','_c125','_c126','_c127','_c128','_c129','_c130','_c131','_c132','_c133','_c134','_c135','_c136','_c137','_c138','_c139','_c140','_c141','_c142','_c143','_c144','_c145','_c146','_c147','_c148','_c149','_c150','_c151','_c152','_c153','_c154','_c155','_c156','_c157','_c158','_c159','_c160','_c161','_c162','_c163','_c164','_c165','_c166','_c167','_c168','_c169','_c170','_c171','_c172','_c173','_c174','_c175','_c176','_c177','_c178','_c179','_c180','_c181','_c182','_c183','_c184','_c185','_c186','_c187','_c188','_c189','_c190','_c191','_c192','_c193','_c194','_c195','_c196','_c197','_c198','_c199','_c200','_c201','_c202','_c203','_c204','_c205','_c206','_c207','_c208','_c209','_c210','_c211','_c212','_c213','_c214','_c215','_c216','_c217','_c218','_c219','_c220','_c221','_c222','_c223','_c224','_c225','_c226','_c227','_c228','_c229','_c230','_c231','_c232','_c233','_c234'
-No rows selected 
->>>  
->>>  SELECT a FROM big_view 
-LIMIT 1;
-'a'
-'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
-1 row selected 
->>>  
->>>  DROP VIEW big_view;
-No rows affected 
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/create_default_prop.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/create_default_prop.q.out b/ql/src/test/results/beelinepositive/create_default_prop.q.out
deleted file mode 100644
index 715c728..0000000
--- a/ql/src/test/results/beelinepositive/create_default_prop.q.out
+++ /dev/null
@@ -1,34 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/create_default_prop.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/create_default_prop.q
->>>  set hive.table.parameters.default=p1=v1,P2=v21=v22=v23;
-No rows affected 
->>>  CREATE TABLE table_p1 (a STRING);
-No rows affected 
->>>  DESC EXTENDED table_p1;
-'col_name','data_type','comment'
-'a','string',''
-'','',''
-'Detailed Table Information','Table(tableName:table_p1, dbName:create_default_prop, owner:!!{user.name}!!, createTime:!!UNIXTIME!!, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null)], location:!!{hive.metastore.warehouse.dir}!!/create_default_prop.db/table_p1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{})), partitionKeys:[], parameters:{p1=v1, transient_lastDdlTime=!!UNIXTIME!!, P2=v21=v22=v23}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)',''
-3 rows selected 
->>>  
->>>  set hive.table.parameters.default=p3=v3;
-No rows affected 
->>>  CREATE TABLE table_p2 LIKE table_p1;
-No rows affected 
->>>  DESC EXTENDED table_p2;
-'col_name','data_type','comment'
-'a','string',''
-'','',''
-'Detailed Table Information','Table(tableName:table_p2, dbName:create_default_prop, owner:!!{user.name}!!, createTime:!!UNIXTIME!!, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null)], location:!!{hive.metastore.warehouse.dir}!!/create_default_prop.db/table_p2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{})), partitionKeys:[], parameters:{transient_lastDdlTime=!!UNIXTIME!!}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)',''
-3 rows selected 
->>>  
->>>  CREATE TABLE table_p3 AS SELECT * FROM table_p1;
-'a'
-No rows selected 
->>>  DESC EXTENDED table_p3;
-'col_name','data_type','comment'
-'a','string',''
-'','',''
-'Detailed Table Information','Table(tableName:table_p3, dbName:create_default_prop, owner:!!{user.name}!!, createTime:!!UNIXTIME!!, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null)], location:!!{hive.metastore.warehouse.dir}!!/create_default_prop.db/table_p3, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{})), partitionKeys:[], parameters:{numPartitions=0, numFiles=1, p3=v3, transient_lastDdlTime=!!UNIXTIME!!, numRows=0, totalSize=0, rawDataSize=0}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)',''
-3 rows selected 
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/create_escape.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/create_escape.q.out b/ql/src/test/results/beelinepositive/create_escape.q.out
deleted file mode 100644
index 5f86ea7..0000000
--- a/ql/src/test/results/beelinepositive/create_escape.q.out
+++ /dev/null
@@ -1,29 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/create_escape.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/create_escape.q
->>>  CREATE TABLE table1 (a STRING, b STRING) 
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' ESCAPED BY '\\' 
-STORED AS TEXTFILE;
-No rows affected 
->>>  
->>>  DESCRIBE table1;
-'col_name','data_type','comment'
-'a','string',''
-'b','string',''
-2 rows selected 
->>>  DESCRIBE EXTENDED table1;
-'col_name','data_type','comment'
-'a','string',''
-'b','string',''
-'','',''
-'Detailed Table Information','Table(tableName:table1, dbName:create_escape, owner:!!{user.name}!!, createTime:!!UNIXTIME!!, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:null), FieldSchema(name:b, type:string, comment:null)], location:!!{hive.metastore.warehouse.dir}!!/create_escape.db/table1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{escape.delim=\, serialization.format=',', field.delim='
-4 rows selected 
->>>  
->>>  INSERT OVERWRITE TABLE table1 SELECT key, '\\\t\\' FROM src WHERE key = 86;
-'key','_c1'
-No rows selected 
->>>  
->>>  SELECT * FROM table1;
-'a','b'
-'86','\	\'
-1 row selected 
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/create_genericudaf.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/create_genericudaf.q.out b/ql/src/test/results/beelinepositive/create_genericudaf.q.out
deleted file mode 100644
index 3d9ba7c..0000000
--- a/ql/src/test/results/beelinepositive/create_genericudaf.q.out
+++ /dev/null
@@ -1,100 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/create_genericudaf.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/create_genericudaf.q
->>>  EXPLAIN 
-CREATE TEMPORARY FUNCTION test_avg AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage';
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_CREATEFUNCTION test_avg 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage')'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-0'
-''
-''
-10 rows selected 
->>>  
->>>  CREATE TEMPORARY FUNCTION test_avg AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage';
-No rows affected 
->>>  
->>>  EXPLAIN 
-SELECT 
-test_avg(1), 
-test_avg(substr(value,5)) 
-FROM src;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION test_avg 1)) (TOK_SELEXPR (TOK_FUNCTION test_avg (TOK_FUNCTION substr (TOK_TABLE_OR_COL value) 5))))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            Select Operator'
-'              expressions:'
-'                    expr: value'
-'                    type: string'
-'              outputColumnNames: value'
-'              Group By Operator'
-'                aggregations:'
-'                      expr: test_avg(1)'
-'                      expr: test_avg(substr(value, 5))'
-'                bucketGroup: false'
-'                mode: hash'
-'                outputColumnNames: _col0, _col1'
-'                Reduce Output Operator'
-'                  sort order: '
-'                  tag: -1'
-'                  value expressions:'
-'                        expr: _col0'
-'                        type: struct<count:bigint,sum:double>'
-'                        expr: _col1'
-'                        type: struct<count:bigint,sum:double>'
-'      Reduce Operator Tree:'
-'        Group By Operator'
-'          aggregations:'
-'                expr: test_avg(VALUE._col0)'
-'                expr: test_avg(VALUE._col1)'
-'          bucketGroup: false'
-'          mode: mergepartial'
-'          outputColumnNames: _col0, _col1'
-'          Select Operator'
-'            expressions:'
-'                  expr: _col0'
-'                  type: double'
-'                  expr: _col1'
-'                  type: double'
-'            outputColumnNames: _col0, _col1'
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-61 rows selected 
->>>  
->>>  SELECT 
-test_avg(1), 
-test_avg(substr(value,5)) 
-FROM src;
-'_c0','_c1'
-'1.0','260.182'
-1 row selected 
->>>  
->>>  DROP TEMPORARY FUNCTIOn test_avg;
-No rows affected 
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/create_genericudf.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/create_genericudf.q.out b/ql/src/test/results/beelinepositive/create_genericudf.q.out
deleted file mode 100644
index 1f19ec2..0000000
--- a/ql/src/test/results/beelinepositive/create_genericudf.q.out
+++ /dev/null
@@ -1,44 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/create_genericudf.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/create_genericudf.q
->>>  EXPLAIN 
-CREATE TEMPORARY FUNCTION test_translate AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestTranslate';
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_CREATEFUNCTION test_translate 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestTranslate')'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-0'
-''
-''
-10 rows selected 
->>>  
->>>  CREATE TEMPORARY FUNCTION test_translate AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestTranslate';
-No rows affected 
->>>  
->>>  CREATE TABLE dest1(c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING, c6 STRING, c7 STRING);
-No rows affected 
->>>  
->>>  FROM src 
-INSERT OVERWRITE TABLE dest1 
-SELECT 
-test_translate('abc', 'a', 'b'), 
-test_translate('abc', 'ab', 'bc'), 
-test_translate(NULL, 'a', 'b'), 
-test_translate('a', NULL, 'b'), 
-test_translate('a', 'a', NULL), 
-test_translate('abc', 'ab', 'b'), 
-test_translate('abc', 'a', 'ab');
-'_c0','_c1','_c2','_c3','_c4','_c5','_c6'
-No rows selected 
->>>  
->>>  SELECT dest1.* FROM dest1 LIMIT 1;
-'c1','c2','c3','c4','c5','c6','c7'
-'bbc','bcc','','','','bc','abc'
-1 row selected 
->>>  
->>>  DROP TEMPORARY FUNCTION test_translate;
-No rows affected 
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/create_insert_outputformat.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/create_insert_outputformat.q.out b/ql/src/test/results/beelinepositive/create_insert_outputformat.q.out
deleted file mode 100644
index f1fd92c..0000000
--- a/ql/src/test/results/beelinepositive/create_insert_outputformat.q.out
+++ /dev/null
@@ -1,54 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/create_insert_outputformat.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/create_insert_outputformat.q
->>>  
->>>  
->>>  CREATE TABLE table_test_output_format(key INT, value STRING) STORED AS 
-INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat' 
-OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat';
-No rows affected 
->>>  
->>>  FROM src 
-INSERT OVERWRITE TABLE table_test_output_format SELECT src.key, src.value LIMIT 10;
-'_col0','_col1'
-No rows selected 
->>>  describe table_test_output_format;
-'col_name','data_type','comment'
-'key','int',''
-'value','string',''
-2 rows selected 
->>>  
->>>  
->>>  
->>>  CREATE TABLE table_test_output_format_sequencefile(key INT, value STRING) STORED AS 
-INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat' 
-OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat';
-No rows affected 
->>>  
->>>  FROM src 
-INSERT OVERWRITE TABLE table_test_output_format_sequencefile SELECT src.key, src.value LIMIT 10;
-'_col0','_col1'
-No rows selected 
->>>  describe table_test_output_format_sequencefile;
-'col_name','data_type','comment'
-'key','int',''
-'value','string',''
-2 rows selected 
->>>  
->>>  
->>>  
->>>  CREATE TABLE table_test_output_format_hivesequencefile(key INT, value STRING) STORED AS 
-INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat' 
-OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat';
-No rows affected 
->>>  
->>>  FROM src 
-INSERT OVERWRITE TABLE table_test_output_format_hivesequencefile SELECT src.key, src.value LIMIT 10;
-'_col0','_col1'
-No rows selected 
->>>  describe table_test_output_format_hivesequencefile;
-'col_name','data_type','comment'
-'key','int',''
-'value','string',''
-2 rows selected 
->>>  
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/create_like.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/create_like.q.out b/ql/src/test/results/beelinepositive/create_like.q.out
deleted file mode 100644
index df1ccc3..0000000
--- a/ql/src/test/results/beelinepositive/create_like.q.out
+++ /dev/null
@@ -1,176 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/create_like.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/create_like.q
->>>  
->>>  
->>>  
->>>  
->>>  CREATE TABLE table1 (a STRING, b STRING) STORED AS TEXTFILE;
-No rows affected 
->>>  DESCRIBE FORMATTED table1;
-'col_name','data_type','comment'
-'# col_name            ','data_type           ','comment             '
-'','',''
-'a                   ','string              ','None                '
-'b                   ','string              ','None                '
-'','',''
-'# Detailed Table Information','',''
-'Database:           ','create_like         ',''
-'Owner:              ','!!{user.name}!!                ',''
-'CreateTime:         ','!!TIMESTAMP!!',''
-'LastAccessTime:     ','UNKNOWN             ',''
-'Retention:          ','0                   ',''
-'Location:           ','!!{hive.metastore.warehouse.dir}!!/create_like.db/table1',''
-'Table Type:         ','MANAGED_TABLE       ',''
-'Table Parameters:','',''
-'','transient_lastDdlTime','!!UNIXTIME!!          '
-'','',''
-'# Storage Information','',''
-'SerDe Library:      ','org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe',''
-'InputFormat:        ','org.apache.hadoop.mapred.TextInputFormat',''
-'OutputFormat:       ','org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',''
-'Compressed:         ','No                  ',''
-'Num Buckets:        ','-1                  ',''
-'Bucket Columns:     ','[]                  ',''
-'Sort Columns:       ','[]                  ',''
-'Storage Desc Params:','',''
-'','serialization.format','1                   '
-27 rows selected 
->>>  
->>>  CREATE TABLE table2 LIKE table1;
-No rows affected 
->>>  DESCRIBE FORMATTED table2;
-'col_name','data_type','comment'
-'# col_name            ','data_type           ','comment             '
-'','',''
-'a                   ','string              ','None                '
-'b                   ','string              ','None                '
-'','',''
-'# Detailed Table Information','',''
-'Database:           ','create_like         ',''
-'Owner:              ','!!{user.name}!!                ',''
-'CreateTime:         ','!!TIMESTAMP!!',''
-'LastAccessTime:     ','UNKNOWN             ',''
-'Retention:          ','0                   ',''
-'Location:           ','!!{hive.metastore.warehouse.dir}!!/create_like.db/table2',''
-'Table Type:         ','MANAGED_TABLE       ',''
-'Table Parameters:','',''
-'','transient_lastDdlTime','!!UNIXTIME!!          '
-'','',''
-'# Storage Information','',''
-'SerDe Library:      ','org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe',''
-'InputFormat:        ','org.apache.hadoop.mapred.TextInputFormat',''
-'OutputFormat:       ','org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',''
-'Compressed:         ','No                  ',''
-'Num Buckets:        ','-1                  ',''
-'Bucket Columns:     ','[]                  ',''
-'Sort Columns:       ','[]                  ',''
-'Storage Desc Params:','',''
-'','serialization.format','1                   '
-27 rows selected 
->>>  
->>>  CREATE TABLE IF NOT EXISTS table2 LIKE table1;
-No rows affected 
->>>  
->>>  CREATE EXTERNAL TABLE IF NOT EXISTS table2 LIKE table1;
-No rows affected 
->>>  
->>>  CREATE EXTERNAL TABLE IF NOT EXISTS table3 LIKE table1;
-No rows affected 
->>>  DESCRIBE FORMATTED table3;
-'col_name','data_type','comment'
-'# col_name            ','data_type           ','comment             '
-'','',''
-'a                   ','string              ','None                '
-'b                   ','string              ','None                '
-'','',''
-'# Detailed Table Information','',''
-'Database:           ','create_like         ',''
-'Owner:              ','!!{user.name}!!                ',''
-'CreateTime:         ','!!TIMESTAMP!!',''
-'LastAccessTime:     ','UNKNOWN             ',''
-'Retention:          ','0                   ',''
-'Location:           ','!!{hive.metastore.warehouse.dir}!!/create_like.db/table3',''
-'Table Type:         ','EXTERNAL_TABLE      ',''
-'Table Parameters:','',''
-'','EXTERNAL            ','TRUE                '
-'','transient_lastDdlTime','!!UNIXTIME!!          '
-'','',''
-'# Storage Information','',''
-'SerDe Library:      ','org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe',''
-'InputFormat:        ','org.apache.hadoop.mapred.TextInputFormat',''
-'OutputFormat:       ','org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',''
-'Compressed:         ','No                  ',''
-'Num Buckets:        ','-1                  ',''
-'Bucket Columns:     ','[]                  ',''
-'Sort Columns:       ','[]                  ',''
-'Storage Desc Params:','',''
-'','serialization.format','1                   '
-28 rows selected 
->>>  
->>>  INSERT OVERWRITE TABLE table1 SELECT key, value FROM src WHERE key = 86;
-'key','value'
-No rows selected 
->>>  INSERT OVERWRITE TABLE table2 SELECT key, value FROM src WHERE key = 100;
-'key','value'
-No rows selected 
->>>  
->>>  SELECT * FROM table1;
-'a','b'
-'86','val_86'
-1 row selected 
->>>  SELECT * FROM table2;
-'a','b'
-'100','val_100'
-'100','val_100'
-2 rows selected 
->>>  
->>>  CREATE EXTERNAL TABLE table4 (a INT) LOCATION '${system:test.src.data.dir}/files/ext_test';
-No rows affected 
->>>  CREATE EXTERNAL TABLE table5 LIKE table4 LOCATION '${system:test.src.data.dir}/files/ext_test';
-No rows affected 
->>>  
->>>  SELECT * FROM table4;
-'a'
-'1'
-'2'
-'3'
-'4'
-'5'
-'6'
-6 rows selected 
->>>  SELECT * FROM table5;
-'a'
-'1'
-'2'
-'3'
-'4'
-'5'
-'6'
-6 rows selected 
->>>  
->>>  DROP TABLE table5;
-No rows affected 
->>>  SELECT * FROM table4;
-'a'
-'1'
-'2'
-'3'
-'4'
-'5'
-'6'
-6 rows selected 
->>>  DROP TABLE table4;
-No rows affected 
->>>  
->>>  CREATE EXTERNAL TABLE table4 (a INT) LOCATION '${system:test.src.data.dir}/files/ext_test';
-No rows affected 
->>>  SELECT * FROM table4;
-'a'
-'1'
-'2'
-'3'
-'4'
-'5'
-'6'
-6 rows selected 
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/create_like2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/create_like2.q.out b/ql/src/test/results/beelinepositive/create_like2.q.out
deleted file mode 100644
index ca6c69a..0000000
--- a/ql/src/test/results/beelinepositive/create_like2.q.out
+++ /dev/null
@@ -1,46 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/create_like2.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/create_like2.q
->>>  -- Tests the copying over of Table Parameters according to a HiveConf setting
->>>  -- when doing a CREATE TABLE LIKE.
->>>  
->>>  CREATE TABLE table1(a INT, b STRING);
-No rows affected 
->>>  ALTER TABLE table1 SET TBLPROPERTIES ('a'='1', 'b'='2', 'c'='3', 'd' = '4');
-No rows affected 
->>>  
->>>  SET hive.ddl.createtablelike.properties.whitelist=a,c,D;
-No rows affected 
->>>  CREATE TABLE table2 LIKE table1;
-No rows affected 
->>>  DESC FORMATTED table2;
-'col_name','data_type','comment'
-'# col_name            ','data_type           ','comment             '
-'','',''
-'a                   ','int                 ','None                '
-'b                   ','string              ','None                '
-'','',''
-'# Detailed Table Information','',''
-'Database:           ','create_like2        ',''
-'Owner:              ','!!{user.name}!!                ',''
-'CreateTime:         ','!!TIMESTAMP!!',''
-'LastAccessTime:     ','UNKNOWN             ',''
-'Retention:          ','0                   ',''
-'Location:           ','!!{hive.metastore.warehouse.dir}!!/create_like2.db/table2',''
-'Table Type:         ','MANAGED_TABLE       ',''
-'Table Parameters:','',''
-'','a                   ','1                   '
-'','c                   ','3                   '
-'','transient_lastDdlTime','!!UNIXTIME!!          '
-'','',''
-'# Storage Information','',''
-'SerDe Library:      ','org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe',''
-'InputFormat:        ','org.apache.hadoop.mapred.TextInputFormat',''
-'OutputFormat:       ','org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',''
-'Compressed:         ','No                  ',''
-'Num Buckets:        ','-1                  ',''
-'Bucket Columns:     ','[]                  ',''
-'Sort Columns:       ','[]                  ',''
-'Storage Desc Params:','',''
-'','serialization.format','1                   '
-29 rows selected 
->>>  !record