Posted to commits@hive.apache.org by gu...@apache.org on 2017/02/03 21:50:52 UTC

[39/51] [partial] hive git commit: HIVE-15790: Remove unused beeline golden files (Gunther Hagleitner, reviewed by Sergey Shelukhin)

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/auto_join_filters.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/auto_join_filters.q.out b/ql/src/test/results/beelinepositive/auto_join_filters.q.out
deleted file mode 100644
index a1573c2..0000000
--- a/ql/src/test/results/beelinepositive/auto_join_filters.q.out
+++ /dev/null
@@ -1,254 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/auto_join_filters.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/auto_join_filters.q
->>>  set hive.auto.convert.join = true;
-No rows affected 
->>>  
->>>  CREATE TABLE myinput1(key int, value int);
-No rows affected 
->>>  LOAD DATA LOCAL INPATH '../data/files/in3.txt' INTO TABLE myinput1;
-No rows affected 
->>>  
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value))  FROM myinput1 a JOIN myinput1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value))  FROM myinput1 a LEFT OUTER JOIN myinput1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'4937935'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value))  FROM myinput1 a RIGHT OUTER JOIN myinput1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3080335'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value))  FROM myinput1 a FULL OUTER JOIN myinput1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'19749880'
-1 row selected 
->>>  
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a JOIN myinput1 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a JOIN myinput1 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a JOIN myinput1 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a JOIN myinput1 b ON a.value = b.value and a.key=b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'4937935'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'4937935'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'4937935'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.key = b.key and a.value=b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'4937935'
-1 row selected 
->>>  
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3080335'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3080335'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3080335'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key=b.key and a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3080335'
-1 row selected 
->>>  
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'4939870'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'4939870'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'4939870'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.value = b.value and a.key=b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'4939870'
-1 row selected 
->>>  
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1 a LEFT OUTER JOIN myinput1 b ON (a.value=b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value) RIGHT OUTER JOIN myinput1 c ON (b.value=c.value AND c.key > 40 AND c.value > 50 AND c.key = c.value AND b.key > 40 AND b.value > 50 AND b.key = b.value);
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1 a RIGHT OUTER JOIN myinput1 b ON (a.value=b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value) LEFT OUTER JOIN myinput1 c ON (b.value=c.value AND c.key > 40 AND c.value > 50 AND c.key = c.value AND b.key > 40 AND b.value > 50 AND b.key = b.value);
-'_c0'
-'3080335'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b RIGHT OUTER JOIN myinput1 c ON a.value = b.value and b.value = c.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value AND c.key > 40 AND c.value > 50 AND c.key = c.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1 a LEFT OUTER JOIN myinput1 b ON (a.value=b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value) RIGHT OUTER JOIN myinput1 c ON (b.key=c.key AND c.key > 40 AND c.value > 50 AND c.key = c.value AND b.key > 40 AND b.value > 50 AND b.key = b.value);
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1 a RIGHT OUTER JOIN myinput1 b ON (a.value=b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value) LEFT OUTER JOIN myinput1 c ON (b.key=c.key AND c.key > 40 AND c.value > 50 AND c.key = c.value AND b.key > 40 AND b.value > 50 AND b.key = b.value);
-'_c0'
-'3080335'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b RIGHT OUTER JOIN myinput1 c ON a.value = b.value and b.key = c.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value AND c.key > 40 AND c.value > 50 AND c.key = c.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  
->>>  
->>>  CREATE TABLE smb_input1(key int, value int) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS;
-No rows affected 
->>>  CREATE TABLE smb_input2(key int, value int) CLUSTERED BY (value) SORTED BY (value) INTO 2 BUCKETS;
-No rows affected 
->>>  LOAD DATA LOCAL INPATH '../data/files/in1.txt' into table smb_input1;
-No rows affected 
->>>  LOAD DATA LOCAL INPATH '../data/files/in2.txt' into table smb_input1;
-No rows affected 
->>>  LOAD DATA LOCAL INPATH '../data/files/in1.txt' into table smb_input2;
-No rows affected 
->>>  LOAD DATA LOCAL INPATH '../data/files/in2.txt' into table smb_input2;
-No rows affected 
->>>  
->>>  SET hive.optimize.bucketmapjoin = true;
-No rows affected 
->>>  SET hive.optimize.bucketmapjoin.sortedmerge = true;
-No rows affected 
->>>  SET hive.input.format = org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
-No rows affected 
->>>  
->>>  SET hive.outerjoin.supports.filters = false;
-No rows affected 
->>>  
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a JOIN myinput1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a RIGHT OUTER JOIN myinput1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a FULL OUTER JOIN myinput1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a JOIN myinput1 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a JOIN myinput1 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a JOIN myinput1 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a JOIN myinput1 b ON a.value = b.value and a.key=b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.key = b.key and a.value=b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key=b.key and a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.value = b.value and a.key=b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1 a LEFT OUTER JOIN myinput1 b ON (a.value=b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value) RIGHT OUTER JOIN myinput1 c ON (b.value=c.value AND c.key > 40 AND c.value > 50 AND c.key = c.value AND b.key > 40 AND b.value > 50 AND b.key = b.value);
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1 a RIGHT OUTER JOIN myinput1 b ON (a.value=b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value) LEFT OUTER JOIN myinput1 c ON (b.value=c.value AND c.key > 40 AND c.value > 50 AND c.key = c.value AND b.key > 40 AND b.value > 50 AND b.key = b.value);
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b RIGHT OUTER JOIN myinput1 c ON a.value = b.value and b.value = c.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value AND c.key > 40 AND c.value > 50 AND c.key = c.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1 a LEFT OUTER JOIN myinput1 b ON (a.value=b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value) RIGHT OUTER JOIN myinput1 c ON (b.key=c.key AND c.key > 40 AND c.value > 50 AND c.key = c.value AND b.key > 40 AND b.value > 50 AND b.key = b.value);
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1 a RIGHT OUTER JOIN myinput1 b ON (a.value=b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value) LEFT OUTER JOIN myinput1 c ON (b.key=c.key AND c.key > 40 AND c.value > 50 AND c.key = c.value AND b.key > 40 AND b.value > 50 AND b.key = b.value);
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b RIGHT OUTER JOIN myinput1 c ON a.value = b.value and b.key = c.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value AND c.key > 40 AND c.value > 50 AND c.key = c.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/auto_join_nulls.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/auto_join_nulls.q.out b/ql/src/test/results/beelinepositive/auto_join_nulls.q.out
deleted file mode 100644
index 822fe42..0000000
--- a/ql/src/test/results/beelinepositive/auto_join_nulls.q.out
+++ /dev/null
@@ -1,101 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/auto_join_nulls.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/auto_join_nulls.q
->>>  set hive.auto.convert.join = true;
-No rows affected 
->>>  
->>>  CREATE TABLE myinput1(key int, value int);
-No rows affected 
->>>  LOAD DATA LOCAL INPATH '../data/files/in1.txt' INTO TABLE myinput1;
-No rows affected 
->>>  
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a JOIN myinput1 b;
-'_c0'
-'13630578'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b;
-'_c0'
-'13630578'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a RIGHT OUTER JOIN myinput1 b;
-'_c0'
-'13630578'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a JOIN myinput1 b ON a.key = b.value;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a JOIN myinput1 b ON a.key = b.key;
-'_c0'
-'4509856'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a JOIN myinput1 b ON a.value = b.value;
-'_c0'
-'3112070'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a JOIN myinput1 b ON a.value = b.value and a.key=b.key;
-'_c0'
-'3078400'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.key = b.value;
-'_c0'
-'4542003'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.value = b.value;
-'_c0'
-'4542038'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.key = b.key;
-'_c0'
-'4543491'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.key = b.key and a.value=b.value;
-'_c0'
-'4542003'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key = b.value;
-'_c0'
-'3079923'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key = b.key;
-'_c0'
-'4509891'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.value = b.value;
-'_c0'
-'3113558'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key=b.key and a.value = b.value;
-'_c0'
-'3079923'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.key = b.value;
-'_c0'
-'4543526'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.key = b.key;
-'_c0'
-'4543526'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.value = b.value;
-'_c0'
-'4543526'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.value = b.value and a.key=b.key;
-'_c0'
-'4543526'
-1 row selected 
->>>  
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1 a LEFT OUTER JOIN myinput1 b ON (a.value=b.value) RIGHT OUTER JOIN myinput1 c ON (b.value=c.value);
-'_c0'
-'3112070'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1 a RIGHT OUTER JOIN myinput1 b ON (a.value=b.value) LEFT OUTER JOIN myinput1 c ON (b.value=c.value);
-'_c0'
-'3113558'
-1 row selected 
->>>  SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a LEFT OUTER JOIN myinput1 b RIGHT OUTER JOIN myinput1 c ON a.value = b.value and b.value = c.value;
-'_c0'
-'3112070'
-1 row selected 
->>>  
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/autogen_colalias.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/autogen_colalias.q.out b/ql/src/test/results/beelinepositive/autogen_colalias.q.out
deleted file mode 100644
index 5c537bd..0000000
--- a/ql/src/test/results/beelinepositive/autogen_colalias.q.out
+++ /dev/null
@@ -1,70 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/autogen_colalias.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/autogen_colalias.q
->>>  CREATE TEMPORARY FUNCTION test_max AS 'org.apache.hadoop.hive.ql.udf.UDAFTestMax';
-No rows affected 
->>>  
->>>  create table dest_grouped_old1 as select 1+1, 2+2 as zz, src.key, test_max(length(src.value)), count(src.value), sin(count(src.value)), count(sin(src.value)), unix_timestamp(), CAST(SUM(IF(value > 10, value, 1)) AS INT), if(src.key > 1, 
-1, 
-0) 
-from src group by src.key;
-'_c0','zz','key','_c3','_c4','_c5','_c6','_c7','_c8','_c9'
-No rows selected 
->>>  describe dest_grouped_old1;
-'col_name','data_type','comment'
-'_c0','int',''
-'zz','int',''
-'key','string',''
-'_c3','int',''
-'_c4','bigint',''
-'_c5','double',''
-'_c6','bigint',''
-'_c7','bigint',''
-'_c8','int',''
-'_c9','int',''
-10 rows selected 
->>>  
->>>  create table dest_grouped_old2 as select distinct src.key from src;
-'key'
-No rows selected 
->>>  describe dest_grouped_old2;
-'col_name','data_type','comment'
-'key','string',''
-1 row selected 
->>>  
->>>  set hive.autogen.columnalias.prefix.label=column_;
-No rows affected 
->>>  set hive.autogen.columnalias.prefix.includefuncname=true;
-No rows affected 
->>>  
->>>  create table dest_grouped_new1 as select 1+1, 2+2 as zz, ((src.key % 2)+2)/2, test_max(length(src.value)), count(src.value), sin(count(src.value)), count(sin(src.value)), unix_timestamp(), CAST(SUM(IF(value > 10, value, 1)) AS INT), if(src.key > 10, 
-(src.key +5) % 2, 
-0) 
-from src group by src.key;
-'column_0','zz','column_2','test_max_length_src__3','count_src_value_4','sin_count_src_value_5','count_sin_src_value_6','unix_timestamp_7','sum_if_value_10_valu_8','if_src_key_10_src_ke_9'
-No rows selected 
->>>  describe dest_grouped_new1;
-'col_name','data_type','comment'
-'column_0','int',''
-'zz','int',''
-'column_2','double',''
-'test_max_length_src__3','int',''
-'count_src_value_4','bigint',''
-'sin_count_src_value_5','double',''
-'count_sin_src_value_6','bigint',''
-'unix_timestamp_7','bigint',''
-'sum_if_value_10_valu_8','int',''
-'if_src_key_10_src_ke_9','double',''
-10 rows selected 
->>>  
->>>  create table dest_grouped_new2 as select distinct src.key from src;
-'key'
-No rows selected 
->>>  describe dest_grouped_new2;
-'col_name','data_type','comment'
-'key','string',''
-1 row selected 
->>>  
->>>  -- Drop the temporary function at the end till HIVE-3160 gets fixed
->>>  DROP TEMPORARY FUNCTION test_max;
-No rows affected 
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/avro_change_schema.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/avro_change_schema.q.out b/ql/src/test/results/beelinepositive/avro_change_schema.q.out
deleted file mode 100644
index b89ba4a..0000000
--- a/ql/src/test/results/beelinepositive/avro_change_schema.q.out
+++ /dev/null
@@ -1,42 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/avro_change_schema.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/avro_change_schema.q
->>>  -- verify that we can update the table properties
->>>  CREATE TABLE avro2 
-ROW FORMAT 
-SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' 
-STORED AS 
-INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat' 
-OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat' 
-TBLPROPERTIES ('avro.schema.literal'='{ "namespace": "org.apache.hive", 
-"name": "first_schema", 
-"type": "record", 
-"fields": [ 
-{ "name":"string1", "type":"string" }, 
-{ "name":"string2", "type":"string" } 
-] }');
-No rows affected 
->>>  
->>>  DESCRIBE avro2;
-'col_name','data_type','comment'
-'string1','string','from deserializer'
-'string2','string','from deserializer'
-2 rows selected 
->>>  
->>>  ALTER TABLE avro2 SET TBLPROPERTIES ('avro.schema.literal'='{ "namespace": "org.apache.hive", 
-"name": "second_schema", 
-"type": "record", 
-"fields": [ 
-{ "name":"int1", "type":"int" }, 
-{ "name":"float1", "type":"float" }, 
-{ "name":"double1", "type":"double" } 
-] }');
-No rows affected 
->>>  
->>>  DESCRIBE avro2;
-'col_name','data_type','comment'
-'int1','int','from deserializer'
-'float1','float','from deserializer'
-'double1','double','from deserializer'
-3 rows selected 
->>>  
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/avro_evolved_schemas.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/avro_evolved_schemas.q.out b/ql/src/test/results/beelinepositive/avro_evolved_schemas.q.out
deleted file mode 100644
index bd17fa9..0000000
--- a/ql/src/test/results/beelinepositive/avro_evolved_schemas.q.out
+++ /dev/null
@@ -1,66 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/avro_evolved_schemas.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/avro_evolved_schemas.q
->>>  -- verify that new fields in schema get propagated to table scans
->>>  CREATE TABLE doctors_with_new_field 
-ROW FORMAT 
-SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' 
-STORED AS 
-INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat' 
-OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat' 
-TBLPROPERTIES ('avro.schema.literal'='{ 
-"namespace": "testing.hive.avro.serde", 
-"name": "doctors", 
-"type": "record", 
-"fields": [ 
-{ 
-"name":"number", 
-"type":"int", 
-"doc":"Order of playing the role" 
-}, 
-{ 
-"name":"first_name", 
-"type":"string", 
-"doc":"first name of actor playing role" 
-}, 
-{ 
-"name":"last_name", 
-"type":"string", 
-"doc":"last name of actor playing role" 
-}, 
-{ 
-"name":"extra_field", 
-"type":"string", 
-"doc:":"an extra field not in the original file", 
-"default":"fishfingers and custard" 
-} 
-] 
-}');
-No rows affected 
->>>  
->>>  DESCRIBE doctors_with_new_field;
-'col_name','data_type','comment'
-'number','int','from deserializer'
-'first_name','string','from deserializer'
-'last_name','string','from deserializer'
-'extra_field','string','from deserializer'
-4 rows selected 
->>>  
->>>  LOAD DATA LOCAL INPATH '../data/files/doctors.avro' INTO TABLE doctors_with_new_field;
-No rows affected 
->>>  
->>>  SELECT * FROM doctors_with_new_field ORDER BY first_name;
-'number','first_name','last_name','extra_field'
-'9','Christopher','Eccleston','fishfingers and custard'
-'6','Colin','Baker','fishfingers and custard'
-'10','David','Tennant','fishfingers and custard'
-'3','Jon','Pertwee','fishfingers and custard'
-'11','Matt','Smith','fishfingers and custard'
-'2','Patrick','Troughton','fishfingers and custard'
-'8','Paul','McGann','fishfingers and custard'
-'5','Peter','Davison','fishfingers and custard'
-'7','Sylvester','McCoy','fishfingers and custard'
-'4','Tom','Baker','fishfingers and custard'
-'1','William','Hartnell','fishfingers and custard'
-11 rows selected 
->>>  
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/avro_joins.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/avro_joins.q.out b/ql/src/test/results/beelinepositive/avro_joins.q.out
deleted file mode 100644
index fdced96..0000000
--- a/ql/src/test/results/beelinepositive/avro_joins.q.out
+++ /dev/null
@@ -1,107 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/avro_joins.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/avro_joins.q
->>>  -- verify that new joins bring in correct schemas (including evolved schemas)
->>>  
->>>  CREATE TABLE doctors4 
-ROW FORMAT 
-SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' 
-STORED AS 
-INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat' 
-OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat' 
-TBLPROPERTIES ('avro.schema.literal'='{ 
-"namespace": "testing.hive.avro.serde", 
-"name": "doctors", 
-"type": "record", 
-"fields": [ 
-{ 
-"name":"number", 
-"type":"int", 
-"doc":"Order of playing the role" 
-}, 
-{ 
-"name":"first_name", 
-"type":"string", 
-"doc":"first name of actor playing role" 
-}, 
-{ 
-"name":"last_name", 
-"type":"string", 
-"doc":"last name of actor playing role" 
-}, 
-{ 
-"name":"extra_field", 
-"type":"string", 
-"doc:":"an extra field not in the original file", 
-"default":"fishfingers and custard" 
-} 
-] 
-}');
-No rows affected 
->>>  
->>>  DESCRIBE doctors4;
-'col_name','data_type','comment'
-'number','int','from deserializer'
-'first_name','string','from deserializer'
-'last_name','string','from deserializer'
-'extra_field','string','from deserializer'
-4 rows selected 
->>>  
->>>  LOAD DATA LOCAL INPATH '../data/files/doctors.avro' INTO TABLE doctors4;
-No rows affected 
->>>  
->>>  CREATE TABLE episodes 
-ROW FORMAT 
-SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' 
-STORED AS 
-INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat' 
-OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat' 
-TBLPROPERTIES ('avro.schema.literal'='{ 
-"namespace": "testing.hive.avro.serde", 
-"name": "episodes", 
-"type": "record", 
-"fields": [ 
-{ 
-"name":"title", 
-"type":"string", 
-"doc":"episode title" 
-}, 
-{ 
-"name":"air_date", 
-"type":"string", 
-"doc":"initial date" 
-}, 
-{ 
-"name":"doctor", 
-"type":"int", 
-"doc":"main actor playing the Doctor in episode" 
-} 
-] 
-}');
-No rows affected 
->>>  
->>>  DESCRIBE episodes;
-'col_name','data_type','comment'
-'title','string','from deserializer'
-'air_date','string','from deserializer'
-'doctor','int','from deserializer'
-3 rows selected 
->>>  
->>>  LOAD DATA LOCAL INPATH '../data/files/episodes.avro' INTO TABLE episodes;
-No rows affected 
->>>  
->>>  SELECT e.title, e.air_date, d.first_name, d.last_name, d.extra_field, e.air_date 
-FROM doctors4 d JOIN episodes e ON (d.number=e.doctor) 
-ORDER BY d.last_name, e.title;
-'title','air_date','first_name','last_name','extra_field','air_date'
-'Horror of Fang Rock','3 September 1977','Tom','Baker','fishfingers and custard','3 September 1977'
-'The Mysterious Planet','6 September 1986','Colin','Baker','fishfingers and custard','6 September 1986'
-'Castrolava','4 January 1982','Peter','Davison','fishfingers and custard','4 January 1982'
-'Rose','26 March 2005','Christopher','Eccleston','fishfingers and custard','26 March 2005'
-'An Unearthly Child','23 November 1963','William','Hartnell','fishfingers and custard','23 November 1963'
-'The Doctor's Wife','14 May 2011','Matt','Smith','fishfingers and custard','14 May 2011'
-'The Eleventh Hour','3 April 2010','Matt','Smith','fishfingers and custard','3 April 2010'
-'The Power of the Daleks','5 November 1966','Patrick','Troughton','fishfingers and custard','5 November 1966'
-8 rows selected 
->>>  
->>>  
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/avro_sanity_test.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/avro_sanity_test.q.out b/ql/src/test/results/beelinepositive/avro_sanity_test.q.out
deleted file mode 100644
index 2dd8d7b..0000000
--- a/ql/src/test/results/beelinepositive/avro_sanity_test.q.out
+++ /dev/null
@@ -1,59 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/avro_sanity_test.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/avro_sanity_test.q
->>>  -- verify that we can actually read avro files
->>>  CREATE TABLE doctors 
-ROW FORMAT 
-SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' 
-STORED AS 
-INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat' 
-OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat' 
-TBLPROPERTIES ('avro.schema.literal'='{ 
-"namespace": "testing.hive.avro.serde", 
-"name": "doctors", 
-"type": "record", 
-"fields": [ 
-{ 
-"name":"number", 
-"type":"int", 
-"doc":"Order of playing the role" 
-}, 
-{ 
-"name":"first_name", 
-"type":"string", 
-"doc":"first name of actor playing role" 
-}, 
-{ 
-"name":"last_name", 
-"type":"string", 
-"doc":"last name of actor playing role" 
-} 
-] 
-}');
-No rows affected 
->>>  
->>>  DESCRIBE doctors;
-'col_name','data_type','comment'
-'number','int','from deserializer'
-'first_name','string','from deserializer'
-'last_name','string','from deserializer'
-3 rows selected 
->>>  
->>>  LOAD DATA LOCAL INPATH '../data/files/doctors.avro' INTO TABLE doctors;
-No rows affected 
->>>  
->>>  SELECT * FROM doctors ORDER BY number;
-'number','first_name','last_name'
-'1','William','Hartnell'
-'2','Patrick','Troughton'
-'3','Jon','Pertwee'
-'4','Tom','Baker'
-'5','Peter','Davison'
-'6','Colin','Baker'
-'7','Sylvester','McCoy'
-'8','Paul','McGann'
-'9','Christopher','Eccleston'
-'10','David','Tennant'
-'11','Matt','Smith'
-11 rows selected 
->>>  
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/avro_schema_literal.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/avro_schema_literal.q.out b/ql/src/test/results/beelinepositive/avro_schema_literal.q.out
deleted file mode 100644
index 45ae129..0000000
--- a/ql/src/test/results/beelinepositive/avro_schema_literal.q.out
+++ /dev/null
@@ -1,54 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/avro_schema_literal.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/avro_schema_literal.q
->>>  CREATE TABLE avro1 
-ROW FORMAT 
-SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' 
-STORED AS 
-INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat' 
-OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat' 
-TBLPROPERTIES ('avro.schema.literal'='{ 
-"namespace": "org.apache.hive", 
-"name": "big_old_schema", 
-"type": "record", 
-"fields": [ 
-{ "name":"string1", "type":"string" }, 
-{ "name":"int1", "type":"int" }, 
-{ "name":"tinyint1", "type":"int" }, 
-{ "name":"smallint1", "type":"int" }, 
-{ "name":"bigint1", "type":"long" }, 
-{ "name":"boolean1", "type":"boolean" }, 
-{ "name":"float1", "type":"float" }, 
-{ "name":"double1", "type":"double" }, 
-{ "name":"list1", "type":{"type":"array", "items":"string"} }, 
-{ "name":"map1", "type":{"type":"map", "values":"int"} }, 
-{ "name":"struct1", "type":{"type":"record", "name":"struct1_name", "fields": [ 
-{ "name":"sInt", "type":"int" }, { "name":"sBoolean", "type":"boolean" }, { "name":"sString", "type":"string" } ] } }, 
-{ "name":"union1", "type":["float", "boolean", "string"] }, 
-{ "name":"enum1", "type":{"type":"enum", "name":"enum1_values", "symbols":["BLUE","RED", "GREEN"]} }, 
-{ "name":"nullableint", "type":["int", "null"] }, 
-{ "name":"bytes1", "type":"bytes" }, 
-{ "name":"fixed1", "type":{"type":"fixed", "name":"threebytes", "size":3} } 
-] }');
-No rows affected 
->>>  
->>>  DESCRIBE avro1;
-'col_name','data_type','comment'
-'string1','string','from deserializer'
-'int1','int','from deserializer'
-'tinyint1','int','from deserializer'
-'smallint1','int','from deserializer'
-'bigint1','bigint','from deserializer'
-'boolean1','boolean','from deserializer'
-'float1','float','from deserializer'
-'double1','double','from deserializer'
-'list1','array<string>','from deserializer'
-'map1','map<string,int>','from deserializer'
-'struct1','struct<sint:int,sboolean:boolean,sstring:string>','from deserializer'
-'union1','uniontype<float,boolean,string>','from deserializer'
-'enum1','string','from deserializer'
-'nullableint','int','from deserializer'
-'bytes1','array<tinyint>','from deserializer'
-'fixed1','array<tinyint>','from deserializer'
-16 rows selected 
->>>  
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/ba_table_union.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/ba_table_union.q.out b/ql/src/test/results/beelinepositive/ba_table_union.q.out
deleted file mode 100644
index b8b9c6a..0000000
--- a/ql/src/test/results/beelinepositive/ba_table_union.q.out
+++ /dev/null
@@ -1,40 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/ba_table_union.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/ba_table_union.q
->>>  drop table ba_test;
-No rows affected 
->>>  
->>>  -- this query tests ba_table1.q + nested queries with multiple operations on binary data types + union on binary types
->>>  create table ba_test (ba_key binary, ba_val binary) ;
-No rows affected 
->>>  
->>>  describe extended ba_test;
-'col_name','data_type','comment'
-'ba_key','binary',''
-'ba_val','binary',''
-'','',''
-'Detailed Table Information','Table(tableName:ba_test, dbName:ba_table_union, owner:!!{user.name}!!, createTime:!!UNIXTIME!!, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:ba_key, type:binary, comment:null), FieldSchema(name:ba_val, type:binary, comment:null)], location:!!{hive.metastore.warehouse.dir}!!/ba_table_union.db/ba_test, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{})), partitionKeys:[], parameters:{transient_lastDdlTime=!!UNIXTIME!!}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)',''
-4 rows selected 
->>>  
->>>  from src insert overwrite table ba_test select cast (src.key as binary), cast (src.value as binary);
-'_c0','_c1'
-No rows selected 
->>>  
->>>  select * from ( select key  from src where key < 50 union all select cast(ba_key as string) as key from ba_test limit 50) unioned order by key limit 10;
-'key'
-'0'
-'0'
-'0'
-'10'
-'11'
-'12'
-'12'
-'128'
-'145'
-'146'
-10 rows selected 
->>>  
->>>  drop table ba_test;
-No rows affected 
->>>  
->>>  
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/binary_constant.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/binary_constant.q.out b/ql/src/test/results/beelinepositive/binary_constant.q.out
deleted file mode 100644
index 63b3046..0000000
--- a/ql/src/test/results/beelinepositive/binary_constant.q.out
+++ /dev/null
@@ -1,7 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/binary_constant.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/binary_constant.q
->>>  select cast(cast('a' as binary) as string) from src limit 1;
-'_c0'
-'a'
-1 row selected 
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/binary_output_format.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/binary_output_format.q.out b/ql/src/test/results/beelinepositive/binary_output_format.q.out
deleted file mode 100644
index 64257ca..0000000
--- a/ql/src/test/results/beelinepositive/binary_output_format.q.out
+++ /dev/null
@@ -1,859 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/binary_output_format.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/binary_output_format.q
->>>  -- Create a table with binary output format
->>>  CREATE TABLE dest1(mydata STRING) 
-ROW FORMAT SERDE 
-'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
-WITH SERDEPROPERTIES ( 
-'serialization.last.column.takes.rest'='true' 
-) 
-STORED AS 
-INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat' 
-OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat';
-No rows affected 
->>>  
->>>  -- Insert into that table using transform
->>>  EXPLAIN EXTENDED 
-INSERT OVERWRITE TABLE dest1 
-SELECT TRANSFORM(*) 
-USING 'cat' 
-AS mydata STRING 
-ROW FORMAT SERDE 
-'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
-WITH SERDEPROPERTIES ( 
-'serialization.last.column.takes.rest'='true' 
-) 
-RECORDREADER 'org.apache.hadoop.hive.ql.exec.BinaryRecordReader' 
-FROM src;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST TOK_ALLCOLREF) TOK_SERDE TOK_RECORDWRITER 'cat' (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' (TOK_TABLEPROPERTIES (TOK_TABLEPROPLIST (TOK_TABLEPROPERTY 'serialization.last.column.takes.rest' 'true'))))) (TOK_RECORDREADER 'org.apache.hadoop.hive.ql.exec.BinaryRecordReader') (TOK_TABCOLLIST (TOK_TABCOL mydata TOK_STRING)))))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5'
-'  Stage-4'
-'  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6'
-'  Stage-2 depends on stages: Stage-0'
-'  Stage-3'
-'  Stage-5'
-'  Stage-6 depends on stages: Stage-5'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            GatherStats: false'
-'            Select Operator'
-'              expressions:'
-'                    expr: key'
-'                    type: string'
-'                    expr: value'
-'                    type: string'
-'              outputColumnNames: _col0, _col1'
-'              Transform Operator'
-'                command: cat'
-'                output info:'
-'                    input format: org.apache.hadoop.mapred.TextInputFormat'
-'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                    properties:'
-'                      columns _col0'
-'                      columns.types string'
-'                      field.delim 9'
-'                      serialization.format 9'
-'                      serialization.last.column.takes.rest true'
-'                File Output Operator'
-'                  compressed: false'
-'                  GlobalTableId: 1'
-'                  directory: pfile:!!{hive.exec.scratchdir}!!'
-'                  NumFilesPerFileSink: 1'
-'                  Stats Publishing Key Prefix: pfile:!!{hive.exec.scratchdir}!!'
-'                  table:'
-'                      input format: org.apache.hadoop.mapred.TextInputFormat'
-'                      output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat'
-'                      properties:'
-'                        bucket_count -1'
-'                        columns mydata'
-'                        columns.types string'
-'                        file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                        file.outputformat org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat'
-'                        location !!{hive.metastore.warehouse.dir}!!/binary_output_format.db/dest1'
-'                        name binary_output_format.dest1'
-'                        serialization.ddl struct dest1 { string mydata}'
-'                        serialization.format 1'
-'                        serialization.last.column.takes.rest true'
-'                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                        transient_lastDdlTime !!UNIXTIME!!'
-'                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                      name: binary_output_format.dest1'
-'                  TotalFiles: 1'
-'                  GatherStats: true'
-'                  MultiFileSpray: false'
-'      Needs Tagging: false'
-'      Path -> Alias:'
-'        !!{hive.metastore.warehouse.dir}!!/binary_output_format.db/src [src]'
-'      Path -> Partition:'
-'        !!{hive.metastore.warehouse.dir}!!/binary_output_format.db/src '
-'          Partition'
-'            base file name: src'
-'            input format: org.apache.hadoop.mapred.TextInputFormat'
-'            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'            properties:'
-'              bucket_count -1'
-'              columns key,value'
-'              columns.types string:string'
-'              file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              location !!{hive.metastore.warehouse.dir}!!/binary_output_format.db/src'
-'              name binary_output_format.src'
-'              numFiles 1'
-'              numPartitions 0'
-'              numRows 0'
-'              rawDataSize 0'
-'              serialization.ddl struct src { string key, string value}'
-'              serialization.format 1'
-'              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              totalSize 5812'
-'              transient_lastDdlTime !!UNIXTIME!!'
-'            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'          '
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              properties:'
-'                bucket_count -1'
-'                columns key,value'
-'                columns.types string:string'
-'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                location !!{hive.metastore.warehouse.dir}!!/binary_output_format.db/src'
-'                name binary_output_format.src'
-'                numFiles 1'
-'                numPartitions 0'
-'                numRows 0'
-'                rawDataSize 0'
-'                serialization.ddl struct src { string key, string value}'
-'                serialization.format 1'
-'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                totalSize 5812'
-'                transient_lastDdlTime !!UNIXTIME!!'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: binary_output_format.src'
-'            name: binary_output_format.src'
-''
-'  Stage: Stage-7'
-'    Conditional Operator'
-''
-'  Stage: Stage-4'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          source: pfile:!!{hive.exec.scratchdir}!!'
-'          destination: pfile:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-0'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          source: pfile:!!{hive.exec.scratchdir}!!'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat'
-'              properties:'
-'                bucket_count -1'
-'                columns mydata'
-'                columns.types string'
-'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                file.outputformat org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat'
-'                location !!{hive.metastore.warehouse.dir}!!/binary_output_format.db/dest1'
-'                name binary_output_format.dest1'
-'                serialization.ddl struct dest1 { string mydata}'
-'                serialization.format 1'
-'                serialization.last.column.takes.rest true'
-'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                transient_lastDdlTime !!UNIXTIME!!'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: binary_output_format.dest1'
-'          tmp directory: pfile:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-2'
-'    Stats-Aggr Operator'
-'      Stats Aggregation Key Prefix: pfile:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-3'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        pfile:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              directory: pfile:!!{hive.exec.scratchdir}!!'
-'              NumFilesPerFileSink: 1'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat'
-'                  properties:'
-'                    bucket_count -1'
-'                    columns mydata'
-'                    columns.types string'
-'                    file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                    file.outputformat org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat'
-'                    location !!{hive.metastore.warehouse.dir}!!/binary_output_format.db/dest1'
-'                    name binary_output_format.dest1'
-'                    serialization.ddl struct dest1 { string mydata}'
-'                    serialization.format 1'
-'                    serialization.last.column.takes.rest true'
-'                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                    transient_lastDdlTime !!UNIXTIME!!'
-'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                  name: binary_output_format.dest1'
-'              TotalFiles: 1'
-'              GatherStats: false'
-'              MultiFileSpray: false'
-'      Needs Tagging: false'
-'      Path -> Alias:'
-'        pfile:!!{hive.exec.scratchdir}!! [pfile:!!{hive.exec.scratchdir}!!]'
-'      Path -> Partition:'
-'        pfile:!!{hive.exec.scratchdir}!! '
-'          Partition'
-'            base file name: -ext-10002'
-'            input format: org.apache.hadoop.mapred.TextInputFormat'
-'            output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat'
-'            properties:'
-'              bucket_count -1'
-'              columns mydata'
-'              columns.types string'
-'              file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'              file.outputformat org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat'
-'              location !!{hive.metastore.warehouse.dir}!!/binary_output_format.db/dest1'
-'              name binary_output_format.dest1'
-'              serialization.ddl struct dest1 { string mydata}'
-'              serialization.format 1'
-'              serialization.last.column.takes.rest true'
-'              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              transient_lastDdlTime !!UNIXTIME!!'
-'            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'          '
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat'
-'              properties:'
-'                bucket_count -1'
-'                columns mydata'
-'                columns.types string'
-'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                file.outputformat org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat'
-'                location !!{hive.metastore.warehouse.dir}!!/binary_output_format.db/dest1'
-'                name binary_output_format.dest1'
-'                serialization.ddl struct dest1 { string mydata}'
-'                serialization.format 1'
-'                serialization.last.column.takes.rest true'
-'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                transient_lastDdlTime !!UNIXTIME!!'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: binary_output_format.dest1'
-'            name: binary_output_format.dest1'
-''
-'  Stage: Stage-5'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        pfile:!!{hive.exec.scratchdir}!! '
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 0'
-'              directory: pfile:!!{hive.exec.scratchdir}!!'
-'              NumFilesPerFileSink: 1'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat'
-'                  properties:'
-'                    bucket_count -1'
-'                    columns mydata'
-'                    columns.types string'
-'                    file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                    file.outputformat org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat'
-'                    location !!{hive.metastore.warehouse.dir}!!/binary_output_format.db/dest1'
-'                    name binary_output_format.dest1'
-'                    serialization.ddl struct dest1 { string mydata}'
-'                    serialization.format 1'
-'                    serialization.last.column.takes.rest true'
-'                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                    transient_lastDdlTime !!UNIXTIME!!'
-'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                  name: binary_output_format.dest1'
-'              TotalFiles: 1'
-'              GatherStats: false'
-'              MultiFileSpray: false'
-'      Needs Tagging: false'
-'      Path -> Alias:'
-'        pfile:!!{hive.exec.scratchdir}!! [pfile:!!{hive.exec.scratchdir}!!]'
-'      Path -> Partition:'
-'        pfile:!!{hive.exec.scratchdir}!! '
-'          Partition'
-'            base file name: -ext-10002'
-'            input format: org.apache.hadoop.mapred.TextInputFormat'
-'            output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat'
-'            properties:'
-'              bucket_count -1'
-'              columns mydata'
-'              columns.types string'
-'              file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'              file.outputformat org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat'
-'              location !!{hive.metastore.warehouse.dir}!!/binary_output_format.db/dest1'
-'              name binary_output_format.dest1'
-'              serialization.ddl struct dest1 { string mydata}'
-'              serialization.format 1'
-'              serialization.last.column.takes.rest true'
-'              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              transient_lastDdlTime !!UNIXTIME!!'
-'            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'          '
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat'
-'              properties:'
-'                bucket_count -1'
-'                columns mydata'
-'                columns.types string'
-'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                file.outputformat org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat'
-'                location !!{hive.metastore.warehouse.dir}!!/binary_output_format.db/dest1'
-'                name binary_output_format.dest1'
-'                serialization.ddl struct dest1 { string mydata}'
-'                serialization.format 1'
-'                serialization.last.column.takes.rest true'
-'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                transient_lastDdlTime !!UNIXTIME!!'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: binary_output_format.dest1'
-'            name: binary_output_format.dest1'
-''
-'  Stage: Stage-6'
-'    Move Operator'
-'      files:'
-'          hdfs directory: true'
-'          source: pfile:!!{hive.exec.scratchdir}!!'
-'          destination: pfile:!!{hive.exec.scratchdir}!!'
-''
-''
-310 rows selected 
->>>  
->>>  INSERT OVERWRITE TABLE dest1 
-SELECT TRANSFORM(*) 
-USING 'cat' 
-AS mydata STRING 
-ROW FORMAT SERDE 
-'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
-WITH SERDEPROPERTIES ( 
-'serialization.last.column.takes.rest'='true' 
-) 
-RECORDREADER 'org.apache.hadoop.hive.ql.exec.BinaryRecordReader' 
-FROM src;
-'mydata'
-No rows selected 
->>>  
->>>  -- Test the result
->>>  SELECT * FROM dest1;
-'mydata'
-'238	val_238'
-'86	val_86'
-'311	val_311'
-'27	val_27'
-'165	val_165'
-'409	val_409'
-'255	val_255'
-'278	val_278'
-'98	val_98'
-'484	val_484'
-'265	val_265'
-'193	val_193'
-'401	val_401'
-'150	val_150'
-'273	val_273'
-'224	val_224'
-'369	val_369'
-'66	val_66'
-'128	val_128'
-'213	val_213'
-'146	val_146'
-'406	val_406'
-'429	val_429'
-'374	val_374'
-'152	val_152'
-'469	val_469'
-'145	val_145'
-'495	val_495'
-'37	val_37'
-'327	val_327'
-'281	val_281'
-'277	val_277'
-'209	val_209'
-'15	val_15'
-'82	val_82'
-'403	val_403'
-'166	val_166'
-'417	val_417'
-'430	val_430'
-'252	val_252'
-'292	val_292'
-'219	val_219'
-'287	val_287'
-'153	val_153'
-'193	val_193'
-'338	val_338'
-'446	val_446'
-'459	val_459'
-'394	val_394'
-'237	val_237'
-'482	val_482'
-'174	val_174'
-'413	val_413'
-'494	val_494'
-'207	val_207'
-'199	val_199'
-'466	val_466'
-'208	val_208'
-'174	val_174'
-'399	val_399'
-'396	val_396'
-'247	val_247'
-'417	val_417'
-'489	val_489'
-'162	val_162'
-'377	val_377'
-'397	val_397'
-'309	val_309'
-'365	val_365'
-'266	val_266'
-'439	val_439'
-'342	val_342'
-'367	val_367'
-'325	val_325'
-'167	val_167'
-'195	val_195'
-'475	val_475'
-'17	val_17'
-'113	val_113'
-'155	val_155'
-'203	val_203'
-'339	val_339'
-'0	val_0'
-'455	val_455'
-'128	val_128'
-'311	val_311'
-'316	val_316'
-'57	val_57'
-'302	val_302'
-'205	val_205'
-'149	val_149'
-'438	val_438'
-'345	val_345'
-'129	val_129'
-'170	val_170'
-'20	val_20'
-'489	val_489'
-'157	val_157'
-'378	val_378'
-'221	val_221'
-'92	val_92'
-'111	val_111'
-'47	val_47'
-'72	val_72'
-'4	val_4'
-'280	val_280'
-'35	val_35'
-'427	val_427'
-'277	val_277'
-'208	val_208'
-'356	val_356'
-'399	val_399'
-'169	val_169'
-'382	val_382'
-'498	val_498'
-'125	val_125'
-'386	val_386'
-'437	val_437'
-'469	val_469'
-'192	val_192'
-'286	val_286'
-'187	val_187'
-'176	val_176'
-'54	val_54'
-'459	val_459'
-'51	val_51'
-'138	val_138'
-'103	val_103'
-'239	val_239'
-'213	val_213'
-'216	val_216'
-'430	val_430'
-'278	val_278'
-'176	val_176'
-'289	val_289'
-'221	val_221'
-'65	val_65'
-'318	val_318'
-'332	val_332'
-'311	val_311'
-'275	val_275'
-'137	val_137'
-'241	val_241'
-'83	val_83'
-'333	val_333'
-'180	val_180'
-'284	val_284'
-'12	val_12'
-'230	val_230'
-'181	val_181'
-'67	val_67'
-'260	val_260'
-'404	val_404'
-'384	val_384'
-'489	val_489'
-'353	val_353'
-'373	val_373'
-'272	val_272'
-'138	val_138'
-'217	val_217'
-'84	val_84'
-'348	val_348'
-'466	val_466'
-'58	val_58'
-'8	val_8'
-'411	val_411'
-'230	val_230'
-'208	val_208'
-'348	val_348'
-'24	val_24'
-'463	val_463'
-'431	val_431'
-'179	val_179'
-'172	val_172'
-'42	val_42'
-'129	val_129'
-'158	val_158'
-'119	val_119'
-'496	val_496'
-'0	val_0'
-'322	val_322'
-'197	val_197'
-'468	val_468'
-'393	val_393'
-'454	val_454'
-'100	val_100'
-'298	val_298'
-'199	val_199'
-'191	val_191'
-'418	val_418'
-'96	val_96'
-'26	val_26'
-'165	val_165'
-'327	val_327'
-'230	val_230'
-'205	val_205'
-'120	val_120'
-'131	val_131'
-'51	val_51'
-'404	val_404'
-'43	val_43'
-'436	val_436'
-'156	val_156'
-'469	val_469'
-'468	val_468'
-'308	val_308'
-'95	val_95'
-'196	val_196'
-'288	val_288'
-'481	val_481'
-'457	val_457'
-'98	val_98'
-'282	val_282'
-'197	val_197'
-'187	val_187'
-'318	val_318'
-'318	val_318'
-'409	val_409'
-'470	val_470'
-'137	val_137'
-'369	val_369'
-'316	val_316'
-'169	val_169'
-'413	val_413'
-'85	val_85'
-'77	val_77'
-'0	val_0'
-'490	val_490'
-'87	val_87'
-'364	val_364'
-'179	val_179'
-'118	val_118'
-'134	val_134'
-'395	val_395'
-'282	val_282'
-'138	val_138'
-'238	val_238'
-'419	val_419'
-'15	val_15'
-'118	val_118'
-'72	val_72'
-'90	val_90'
-'307	val_307'
-'19	val_19'
-'435	val_435'
-'10	val_10'
-'277	val_277'
-'273	val_273'
-'306	val_306'
-'224	val_224'
-'309	val_309'
-'389	val_389'
-'327	val_327'
-'242	val_242'
-'369	val_369'
-'392	val_392'
-'272	val_272'
-'331	val_331'
-'401	val_401'
-'242	val_242'
-'452	val_452'
-'177	val_177'
-'226	val_226'
-'5	val_5'
-'497	val_497'
-'402	val_402'
-'396	val_396'
-'317	val_317'
-'395	val_395'
-'58	val_58'
-'35	val_35'
-'336	val_336'
-'95	val_95'
-'11	val_11'
-'168	val_168'
-'34	val_34'
-'229	val_229'
-'233	val_233'
-'143	val_143'
-'472	val_472'
-'322	val_322'
-'498	val_498'
-'160	val_160'
-'195	val_195'
-'42	val_42'
-'321	val_321'
-'430	val_430'
-'119	val_119'
-'489	val_489'
-'458	val_458'
-'78	val_78'
-'76	val_76'
-'41	val_41'
-'223	val_223'
-'492	val_492'
-'149	val_149'
-'449	val_449'
-'218	val_218'
-'228	val_228'
-'138	val_138'
-'453	val_453'
-'30	val_30'
-'209	val_209'
-'64	val_64'
-'468	val_468'
-'76	val_76'
-'74	val_74'
-'342	val_342'
-'69	val_69'
-'230	val_230'
-'33	val_33'
-'368	val_368'
-'103	val_103'
-'296	val_296'
-'113	val_113'
-'216	val_216'
-'367	val_367'
-'344	val_344'
-'167	val_167'
-'274	val_274'
-'219	val_219'
-'239	val_239'
-'485	val_485'
-'116	val_116'
-'223	val_223'
-'256	val_256'
-'263	val_263'
-'70	val_70'
-'487	val_487'
-'480	val_480'
-'401	val_401'
-'288	val_288'
-'191	val_191'
-'5	val_5'
-'244	val_244'
-'438	val_438'
-'128	val_128'
-'467	val_467'
-'432	val_432'
-'202	val_202'
-'316	val_316'
-'229	val_229'
-'469	val_469'
-'463	val_463'
-'280	val_280'
-'2	val_2'
-'35	val_35'
-'283	val_283'
-'331	val_331'
-'235	val_235'
-'80	val_80'
-'44	val_44'
-'193	val_193'
-'321	val_321'
-'335	val_335'
-'104	val_104'
-'466	val_466'
-'366	val_366'
-'175	val_175'
-'403	val_403'
-'483	val_483'
-'53	val_53'
-'105	val_105'
-'257	val_257'
-'406	val_406'
-'409	val_409'
-'190	val_190'
-'406	val_406'
-'401	val_401'
-'114	val_114'
-'258	val_258'
-'90	val_90'
-'203	val_203'
-'262	val_262'
-'348	val_348'
-'424	val_424'
-'12	val_12'
-'396	val_396'
-'201	val_201'
-'217	val_217'
-'164	val_164'
-'431	val_431'
-'454	val_454'
-'478	val_478'
-'298	val_298'
-'125	val_125'
-'431	val_431'
-'164	val_164'
-'424	val_424'
-'187	val_187'
-'382	val_382'
-'5	val_5'
-'70	val_70'
-'397	val_397'
-'480	val_480'
-'291	val_291'
-'24	val_24'
-'351	val_351'
-'255	val_255'
-'104	val_104'
-'70	val_70'
-'163	val_163'
-'438	val_438'
-'119	val_119'
-'414	val_414'
-'200	val_200'
-'491	val_491'
-'237	val_237'
-'439	val_439'
-'360	val_360'
-'248	val_248'
-'479	val_479'
-'305	val_305'
-'417	val_417'
-'199	val_199'
-'444	val_444'
-'120	val_120'
-'429	val_429'
-'169	val_169'
-'443	val_443'
-'323	val_323'
-'325	val_325'
-'277	val_277'
-'230	val_230'
-'478	val_478'
-'178	val_178'
-'468	val_468'
-'310	val_310'
-'317	val_317'
-'333	val_333'
-'493	val_493'
-'460	val_460'
-'207	val_207'
-'249	val_249'
-'265	val_265'
-'480	val_480'
-'83	val_83'
-'136	val_136'
-'353	val_353'
-'172	val_172'
-'214	val_214'
-'462	val_462'
-'233	val_233'
-'406	val_406'
-'133	val_133'
-'175	val_175'
-'189	val_189'
-'454	val_454'
-'375	val_375'
-'401	val_401'
-'421	val_421'
-'407	val_407'
-'384	val_384'
-'256	val_256'
-'26	val_26'
-'134	val_134'
-'67	val_67'
-'384	val_384'
-'379	val_379'
-'18	val_18'
-'462	val_462'
-'492	val_492'
-'100	val_100'
-'298	val_298'
-'9	val_9'
-'341	val_341'
-'498	val_498'
-'146	val_146'
-'458	val_458'
-'362	val_362'
-'186	val_186'
-'285	val_285'
-'348	val_348'
-'167	val_167'
-'18	val_18'
-'273	val_273'
-'183	val_183'
-'281	val_281'
-'344	val_344'
-'97	val_97'
-'469	val_469'
-'315	val_315'
-'84	val_84'
-'28	val_28'
-'37	val_37'
-'448	val_448'
-'152	val_152'
-'348	val_348'
-'307	val_307'
-'194	val_194'
-'414	val_414'
-'477	val_477'
-'222	val_222'
-'126	val_126'
-'90	val_90'
-'169	val_169'
-'403	val_403'
-'400	val_400'
-'200	val_200'
-'97	val_97'
-500 rows selected 
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/binarysortable_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/binarysortable_1.q.out b/ql/src/test/results/beelinepositive/binarysortable_1.q.out
deleted file mode 100644
index a08d35d..0000000
--- a/ql/src/test/results/beelinepositive/binarysortable_1.q.out
+++ /dev/null
@@ -1,118 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/binarysortable_1.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/binarysortable_1.q
->>>  CREATE TABLE mytable(key STRING, value STRING) 
-ROW FORMAT DELIMITED 
-FIELDS TERMINATED BY '9' 
-STORED AS TEXTFILE;
-No rows affected 
->>>  
->>>  LOAD DATA LOCAL INPATH '../data/files/string.txt' INTO TABLE mytable;
-No rows affected 
->>>  
->>>  EXPLAIN 
-SELECT REGEXP_REPLACE(REGEXP_REPLACE(REGEXP_REPLACE(key, '\001', '^A'), '\0', '^@'), '\002', '^B'), value 
-FROM ( 
-SELECT key, sum(value) as value 
-FROM mytable 
-GROUP BY key 
-) a;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME mytable))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_TABLE_OR_COL value)) value)) (TOK_GROUPBY (TOK_TABLE_OR_COL key)))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION REGEXP_REPLACE (TOK_FUNCTION REGEXP_REPLACE (TOK_FUNCTION REGEXP_REPLACE (TOK_TABLE_OR_COL key) '\001' '^A') '\0' '^@') '\002' '^B')) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        a:mytable '
-'          TableScan'
-'            alias: mytable'
-'            Select Operator'
-'              expressions:'
-'                    expr: key'
-'                    type: string'
-'                    expr: value'
-'                    type: string'
-'              outputColumnNames: key, value'
-'              Group By Operator'
-'                aggregations:'
-'                      expr: sum(value)'
-'                bucketGroup: false'
-'                keys:'
-'                      expr: key'
-'                      type: string'
-'                mode: hash'
-'                outputColumnNames: _col0, _col1'
-'                Reduce Output Operator'
-'                  key expressions:'
-'                        expr: _col0'
-'                        type: string'
-'                  sort order: +'
-'                  Map-reduce partition columns:'
-'                        expr: _col0'
-'                        type: string'
-'                  tag: -1'
-'                  value expressions:'
-'                        expr: _col1'
-'                        type: double'
-'      Reduce Operator Tree:'
-'        Group By Operator'
-'          aggregations:'
-'                expr: sum(VALUE._col0)'
-'          bucketGroup: false'
-'          keys:'
-'                expr: KEY._col0'
-'                type: string'
-'          mode: mergepartial'
-'          outputColumnNames: _col0, _col1'
-'          Select Operator'
-'            expressions:'
-'                  expr: _col0'
-'                  type: string'
-'                  expr: _col1'
-'                  type: double'
-'            outputColumnNames: _col0, _col1'
-'            Select Operator'
-'              expressions:'
-'                    expr: regexp_replace(regexp_replace(regexp_replace(_col0, ''
-'                    type: string'
-'                    expr: _col1'
-'                    type: double'
-'              outputColumnNames: _col0, _col1'
-'              File Output Operator'
-'                compressed: false'
-'                GlobalTableId: 0'
-'                table:'
-'                    input format: org.apache.hadoop.mapred.TextInputFormat'
-'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-78 rows selected 
->>>  
->>>  SELECT REGEXP_REPLACE(REGEXP_REPLACE(REGEXP_REPLACE(key, '\001', '^A'), '\0', '^@'), '\002', '^B'), value 
-FROM ( 
-SELECT key, sum(value) as value 
-FROM mytable 
-GROUP BY key 
-) a;
-'_c0','value'
-'^@^@^@','7.0'
-'^@^A^@','9.0'
-'^@test^@','2.0'
-'^A^@^A','10.0'
-'^A^A^A','8.0'
-'^Atest^A','3.0'
-'a^@bc^A^B^A^@','1.0'
-'test^@^@^A^Atest','6.0'
-'test^@test','4.0'
-'test^Atest','5.0'
-10 rows selected 
->>>  !record

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/bucket1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/bucket1.q.out b/ql/src/test/results/beelinepositive/bucket1.q.out
deleted file mode 100644
index 295d786..0000000
--- a/ql/src/test/results/beelinepositive/bucket1.q.out
+++ /dev/null
@@ -1,675 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/bucket1.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/bucket1.q
->>>  set hive.enforce.bucketing = true;
-No rows affected 
->>>  set hive.exec.reducers.max = 200;
-No rows affected 
->>>  
->>>  CREATE TABLE bucket1_1(key int, value string) CLUSTERED BY (key) INTO 100 BUCKETS;
-No rows affected 
->>>  
->>>  explain extended 
-insert overwrite table bucket1_1 
-select * from src;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME bucket1_1))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 depends on stages: Stage-1'
-'  Stage-2 depends on stages: Stage-0'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            GatherStats: false'
-'            Select Operator'
-'              expressions:'
-'                    expr: key'
-'                    type: string'
-'                    expr: value'
-'                    type: string'
-'              outputColumnNames: _col0, _col1'
-'              Reduce Output Operator'
-'                sort order: '
-'                Map-reduce partition columns:'
-'                      expr: UDFToInteger(_col0)'
-'                      type: int'
-'                tag: -1'
-'                value expressions:'
-'                      expr: _col0'
-'                      type: string'
-'                      expr: _col1'
-'                      type: string'
-'      Needs Tagging: false'
-'      Path -> Alias:'
-'        !!{hive.metastore.warehouse.dir}!!/bucket1.db/src [src]'
-'      Path -> Partition:'
-'        !!{hive.metastore.warehouse.dir}!!/bucket1.db/src '
-'          Partition'
-'            base file name: src'
-'            input format: org.apache.hadoop.mapred.TextInputFormat'
-'            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'            properties:'
-'              bucket_count -1'
-'              columns key,value'
-'              columns.types string:string'
-'              file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              location !!{hive.metastore.warehouse.dir}!!/bucket1.db/src'
-'              name bucket1.src'
-'              numFiles 1'
-'              numPartitions 0'
-'              numRows 0'
-'              rawDataSize 0'
-'              serialization.ddl struct src { string key, string value}'
-'              serialization.format 1'
-'              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              totalSize 5812'
-'              transient_lastDdlTime !!UNIXTIME!!'
-'            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'          '
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              properties:'
-'                bucket_count -1'
-'                columns key,value'
-'                columns.types string:string'
-'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                location !!{hive.metastore.warehouse.dir}!!/bucket1.db/src'
-'                name bucket1.src'
-'                numFiles 1'
-'                numPartitions 0'
-'                numRows 0'
-'                rawDataSize 0'
-'                serialization.ddl struct src { string key, string value}'
-'                serialization.format 1'
-'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                totalSize 5812'
-'                transient_lastDdlTime !!UNIXTIME!!'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: bucket1.src'
-'            name: bucket1.src'
-'      Reduce Operator Tree:'
-'        Extract'
-'          Select Operator'
-'            expressions:'
-'                  expr: UDFToInteger(_col0)'
-'                  type: int'
-'                  expr: _col1'
-'                  type: string'
-'            outputColumnNames: _col0, _col1'
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 1'
-'              directory: pfile:!!{hive.exec.scratchdir}!!'
-'              NumFilesPerFileSink: 1'
-'              Stats Publishing Key Prefix: pfile:!!{hive.exec.scratchdir}!!'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                  properties:'
-'                    bucket_count 100'
-'                    bucket_field_name key'
-'                    columns key,value'
-'                    columns.types int:string'
-'                    file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                    file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                    location !!{hive.metastore.warehouse.dir}!!/bucket1.db/bucket1_1'
-'                    name bucket1.bucket1_1'
-'                    serialization.ddl struct bucket1_1 { i32 key, string value}'
-'                    serialization.format 1'
-'                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                    transient_lastDdlTime !!UNIXTIME!!'
-'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                  name: bucket1.bucket1_1'
-'              TotalFiles: 1'
-'              GatherStats: true'
-'              MultiFileSpray: false'
-''
-'  Stage: Stage-0'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          source: pfile:!!{hive.exec.scratchdir}!!'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              properties:'
-'                bucket_count 100'
-'                bucket_field_name key'
-'                columns key,value'
-'                columns.types int:string'
-'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                location !!{hive.metastore.warehouse.dir}!!/bucket1.db/bucket1_1'
-'                name bucket1.bucket1_1'
-'                serialization.ddl struct bucket1_1 { i32 key, string value}'
-'                serialization.format 1'
-'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                transient_lastDdlTime !!UNIXTIME!!'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: bucket1.bucket1_1'
-'          tmp directory: pfile:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-2'
-'    Stats-Aggr Operator'
-'      Stats Aggregation Key Prefix: pfile:!!{hive.exec.scratchdir}!!'
-''
-150 rows selected 
->>>  
->>>  insert overwrite table bucket1_1 
-select * from src;
-'_col0','_col1'
-No rows selected 
->>>  
->>>  select * from bucket1_1 order by key;
-'key','value'
-'0','val_0'
-'0','val_0'
-'0','val_0'
-'2','val_2'
-'4','val_4'
-'5','val_5'
-'5','val_5'
-'5','val_5'
-'8','val_8'
-'9','val_9'
-'10','val_10'
-'11','val_11'
-'12','val_12'
-'12','val_12'
-'15','val_15'
-'15','val_15'
-'17','val_17'
-'18','val_18'
-'18','val_18'
-'19','val_19'
-'20','val_20'
-'24','val_24'
-'24','val_24'
-'26','val_26'
-'26','val_26'
-'27','val_27'
-'28','val_28'
-'30','val_30'
-'33','val_33'
-'34','val_34'
-'35','val_35'
-'35','val_35'
-'35','val_35'
-'37','val_37'
-'37','val_37'
-'41','val_41'
-'42','val_42'
-'42','val_42'
-'43','val_43'
-'44','val_44'
-'47','val_47'
-'51','val_51'
-'51','val_51'
-'53','val_53'
-'54','val_54'
-'57','val_57'
-'58','val_58'
-'58','val_58'
-'64','val_64'
-'65','val_65'
-'66','val_66'
-'67','val_67'
-'67','val_67'
-'69','val_69'
-'70','val_70'
-'70','val_70'
-'70','val_70'
-'72','val_72'
-'72','val_72'
-'74','val_74'
-'76','val_76'
-'76','val_76'
-'77','val_77'
-'78','val_78'
-'80','val_80'
-'82','val_82'
-'83','val_83'
-'83','val_83'
-'84','val_84'
-'84','val_84'
-'85','val_85'
-'86','val_86'
-'87','val_87'
-'90','val_90'
-'90','val_90'
-'90','val_90'
-'92','val_92'
-'95','val_95'
-'95','val_95'
-'96','val_96'
-'97','val_97'
-'97','val_97'
-'98','val_98'
-'98','val_98'
-'100','val_100'
-'100','val_100'
-'103','val_103'
-'103','val_103'
-'104','val_104'
-'104','val_104'
-'105','val_105'
-'111','val_111'
-'113','val_113'
-'113','val_113'
-'114','val_114'
-'116','val_116'
-'118','val_118'
-'118','val_118'
-'119','val_119'
-'119','val_119'
-'119','val_119'
-'120','val_120'
-'120','val_120'
-'125','val_125'
-'125','val_125'
-'126','val_126'
-'128','val_128'
-'128','val_128'
-'128','val_128'
-'129','val_129'
-'129','val_129'
-'131','val_131'
-'133','val_133'
-'134','val_134'
-'134','val_134'
-'136','val_136'
-'137','val_137'
-'137','val_137'
-'138','val_138'
-'138','val_138'
-'138','val_138'
-'138','val_138'
-'143','val_143'
-'145','val_145'
-'146','val_146'
-'146','val_146'
-'149','val_149'
-'149','val_149'
-'150','val_150'
-'152','val_152'
-'152','val_152'
-'153','val_153'
-'155','val_155'
-'156','val_156'
-'157','val_157'
-'158','val_158'
-'160','val_160'
-'162','val_162'
-'163','val_163'
-'164','val_164'
-'164','val_164'
-'165','val_165'
-'165','val_165'
-'166','val_166'
-'167','val_167'
-'167','val_167'
-'167','val_167'
-'168','val_168'
-'169','val_169'
-'169','val_169'
-'169','val_169'
-'169','val_169'
-'170','val_170'
-'172','val_172'
-'172','val_172'
-'174','val_174'
-'174','val_174'
-'175','val_175'
-'175','val_175'
-'176','val_176'
-'176','val_176'
-'177','val_177'
-'178','val_178'
-'179','val_179'
-'179','val_179'
-'180','val_180'
-'181','val_181'
-'183','val_183'
-'186','val_186'
-'187','val_187'
-'187','val_187'
-'187','val_187'
-'189','val_189'
-'190','val_190'
-'191','val_191'
-'191','val_191'
-'192','val_192'
-'193','val_193'
-'193','val_193'
-'193','val_193'
-'194','val_194'
-'195','val_195'
-'195','val_195'
-'196','val_196'
-'197','val_197'
-'197','val_197'
-'199','val_199'
-'199','val_199'
-'199','val_199'
-'200','val_200'
-'200','val_200'
-'201','val_201'
-'202','val_202'
-'203','val_203'
-'203','val_203'
-'205','val_205'
-'205','val_205'
-'207','val_207'
-'207','val_207'
-'208','val_208'
-'208','val_208'
-'208','val_208'
-'209','val_209'
-'209','val_209'
-'213','val_213'
-'213','val_213'
-'214','val_214'
-'216','val_216'
-'216','val_216'
-'217','val_217'
-'217','val_217'
-'218','val_218'
-'219','val_219'
-'219','val_219'
-'221','val_221'
-'221','val_221'
-'222','val_222'
-'223','val_223'
-'223','val_223'
-'224','val_224'
-'224','val_224'
-'226','val_226'
-'228','val_228'
-'229','val_229'
-'229','val_229'
-'230','val_230'
-'230','val_230'
-'230','val_230'
-'230','val_230'
-'230','val_230'
-'233','val_233'
-'233','val_233'
-'235','val_235'
-'237','val_237'
-'237','val_237'
-'238','val_238'
-'238','val_238'
-'239','val_239'
-'239','val_239'
-'241','val_241'
-'242','val_242'
-'242','val_242'
-'244','val_244'
-'247','val_247'
-'248','val_248'
-'249','val_249'
-'252','val_252'
-'255','val_255'
-'255','val_255'
-'256','val_256'
-'256','val_256'
-'257','val_257'
-'258','val_258'
-'260','val_260'
-'262','val_262'
-'263','val_263'
-'265','val_265'
-'265','val_265'
-'266','val_266'
-'272','val_272'
-'272','val_272'
-'273','val_273'
-'273','val_273'
-'273','val_273'
-'274','val_274'
-'275','val_275'
-'277','val_277'
-'277','val_277'
-'277','val_277'
-'277','val_277'
-'278','val_278'
-'278','val_278'
-'280','val_280'
-'280','val_280'
-'281','val_281'
-'281','val_281'
-'282','val_282'
-'282','val_282'
-'283','val_283'
-'284','val_284'
-'285','val_285'
-'286','val_286'
-'287','val_287'
-'288','val_288'
-'288','val_288'
-'289','val_289'
-'291','val_291'
-'292','val_292'
-'296','val_296'
-'298','val_298'
-'298','val_298'
-'298','val_298'
-'302','val_302'
-'305','val_305'
-'306','val_306'
-'307','val_307'
-'307','val_307'
-'308','val_308'
-'309','val_309'
-'309','val_309'
-'310','val_310'
-'311','val_311'
-'311','val_311'
-'311','val_311'
-'315','val_315'
-'316','val_316'
-'316','val_316'
-'316','val_316'
-'317','val_317'
-'317','val_317'
-'318','val_318'
-'318','val_318'
-'318','val_318'
-'321','val_321'
-'321','val_321'
-'322','val_322'
-'322','val_322'
-'323','val_323'
-'325','val_325'
-'325','val_325'
-'327','val_327'
-'327','val_327'
-'327','val_327'
-'331','val_331'
-'331','val_331'
-'332','val_332'
-'333','val_333'
-'333','val_333'
-'335','val_335'
-'336','val_336'
-'338','val_338'
-'339','val_339'
-'341','val_341'
-'342','val_342'
-'342','val_342'
-'344','val_344'
-'344','val_344'
-'345','val_345'
-'348','val_348'
-'348','val_348'
-'348','val_348'
-'348','val_348'
-'348','val_348'
-'351','val_351'
-'353','val_353'
-'353','val_353'
-'356','val_356'
-'360','val_360'
-'362','val_362'
-'364','val_364'
-'365','val_365'
-'366','val_366'
-'367','val_367'
-'367','val_367'
-'368','val_368'
-'369','val_369'
-'369','val_369'
-'369','val_369'
-'373','val_373'
-'374','val_374'
-'375','val_375'
-'377','val_377'
-'378','val_378'
-'379','val_379'
-'382','val_382'
-'382','val_382'
-'384','val_384'
-'384','val_384'
-'384','val_384'
-'386','val_386'
-'389','val_389'
-'392','val_392'
-'393','val_393'
-'394','val_394'
-'395','val_395'
-'395','val_395'
-'396','val_396'
-'396','val_396'
-'396','val_396'
-'397','val_397'
-'397','val_397'
-'399','val_399'
-'399','val_399'
-'400','val_400'
-'401','val_401'
-'401','val_401'
-'401','val_401'
-'401','val_401'
-'401','val_401'
-'402','val_402'
-'403','val_403'
-'403','val_403'
-'403','val_403'
-'404','val_404'
-'404','val_404'
-'406','val_406'
-'406','val_406'
-'406','val_406'
-'406','val_406'
-'407','val_407'
-'409','val_409'
-'409','val_409'
-'409','val_409'
-'411','val_411'
-'413','val_413'
-'413','val_413'
-'414','val_414'
-'414','val_414'
-'417','val_417'
-'417','val_417'
-'417','val_417'
-'418','val_418'
-'419','val_419'
-'421','val_421'
-'424','val_424'
-'424','val_424'
-'427','val_427'
-'429','val_429'
-'429','val_429'
-'430','val_430'
-'430','val_430'
-'430','val_430'
-'431','val_431'
-'431','val_431'
-'431','val_431'
-'432','val_432'
-'435','val_435'
-'436','val_436'
-'437','val_437'
-'438','val_438'
-'438','val_438'
-'438','val_438'
-'439','val_439'
-'439','val_439'
-'443','val_443'
-'444','val_444'
-'446','val_446'
-'448','val_448'
-'449','val_449'
-'452','val_452'
-'453','val_453'
-'454','val_454'
-'454','val_454'
-'454','val_454'
-'455','val_455'
-'457','val_457'
-'458','val_458'
-'458','val_458'
-'459','val_459'
-'459','val_459'
-'460','val_460'
-'462','val_462'
-'462','val_462'
-'463','val_463'
-'463','val_463'
-'466','val_466'
-'466','val_466'
-'466','val_466'
-'467','val_467'
-'468','val_468'
-'468','val_468'
-'468','val_468'
-'468','val_468'
-'469','val_469'
-'469','val_469'
-'469','val_469'
-'469','val_469'
-'469','val_469'
-'470','val_470'
-'472','val_472'
-'475','val_475'
-'477','val_477'
-'478','val_478'
-'478','val_478'
-'479','val_479'
-'480','val_480'
-'480','val_480'
-'480','val_480'
-'481','val_481'
-'482','val_482'
-'483','val_483'
-'484','val_484'
-'485','val_485'
-'487','val_487'
-'489','val_489'
-'489','val_489'
-'489','val_489'
-'489','val_489'
-'490','val_490'
-'491','val_491'
-'492','val_492'
-'492','val_492'
-'493','val_493'
-'494','val_494'
-'495','val_495'
-'496','val_496'
-'497','val_497'
-'498','val_498'
-'498','val_498'
-'498','val_498'
-500 rows selected 
->>>  !record