You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by am...@apache.org on 2013/04/26 06:59:58 UTC
svn commit: r1476039 [11/22] - in /hive/branches/HIVE-4115: ./ beeline/
beeline/src/java/org/apache/hive/beeline/ bin/ builtins/ cli/
common/src/java/org/apache/hadoop/hive/conf/ conf/ data/files/
eclipse-templates/ hbase-handler/ hbase-handler/src/jav...
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_13_managed_location.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_13_managed_location.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_13_managed_location.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_13_managed_location.q Fri Apr 26 04:59:50 2013
@@ -6,7 +6,7 @@ create table exim_department ( dep_id in
stored as textfile
tblproperties("creator"="krishna");
load data local inpath "../data/files/test.dat" into table exim_department;
-dfs -mkdir ../build/ql/test/data/exports/exim_department/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
dfs -rmr ../build/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -14,7 +14,7 @@ drop table exim_department;
create database importer;
use importer;
-dfs -mkdir ../build/ql/test/data/tablestore/exim_department/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore/exim_department/temp;
dfs -rmr ../build/ql/test/data/tablestore/exim_department;
import table exim_department from 'ql/test/data/exports/exim_department'
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_14_managed_location_over_existing.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_14_managed_location_over_existing.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_14_managed_location_over_existing.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_14_managed_location_over_existing.q Fri Apr 26 04:59:50 2013
@@ -6,7 +6,7 @@ create table exim_department ( dep_id in
stored as textfile
tblproperties("creator"="krishna");
load data local inpath "../data/files/test.dat" into table exim_department;
-dfs -mkdir ../build/ql/test/data/exports/exim_department/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
dfs -rmr ../build/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
@@ -14,7 +14,7 @@ drop table exim_department;
create database importer;
use importer;
-dfs -mkdir ../build/ql/test/data/tablestore/exim_department/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore/exim_department/temp;
dfs -rmr ../build/ql/test/data/tablestore/exim_department;
create table exim_department ( dep_id int comment "department id")
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_15_external_part.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_15_external_part.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_15_external_part.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_15_external_part.q Fri Apr 26 04:59:50 2013
@@ -15,7 +15,7 @@ load data local inpath "../data/files/te
into table exim_employee partition (emp_country="us", emp_state="tn");
load data local inpath "../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs -mkdir ../build/ql/test/data/exports/exim_employee/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
dfs -rmr ../build/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -23,7 +23,7 @@ drop table exim_employee;
create database importer;
use importer;
-dfs -mkdir ../build/ql/test/data/tablestore/exim_employee/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore/exim_employee/temp;
dfs -rmr ../build/ql/test/data/tablestore/exim_employee;
create external table exim_employee ( emp_id int comment "employee id")
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_16_part_external.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_16_part_external.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_16_part_external.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_16_part_external.q Fri Apr 26 04:59:50 2013
@@ -15,7 +15,7 @@ load data local inpath "../data/files/te
into table exim_employee partition (emp_country="us", emp_state="tn");
load data local inpath "../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs -mkdir ../build/ql/test/data/exports/exim_employee/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
dfs -rmr ../build/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -23,9 +23,9 @@ drop table exim_employee;
create database importer;
use importer;
-dfs -mkdir ../build/ql/test/data/tablestore/exim_employee/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore/exim_employee/temp;
dfs -rmr ../build/ql/test/data/tablestore/exim_employee;
-dfs -mkdir ../build/ql/test/data/tablestore2/exim_employee/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore2/exim_employee/temp;
dfs -rmr ../build/ql/test/data/tablestore2/exim_employee;
create external table exim_employee ( emp_id int comment "employee id")
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_17_part_managed.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_17_part_managed.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_17_part_managed.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_17_part_managed.q Fri Apr 26 04:59:50 2013
@@ -15,7 +15,7 @@ load data local inpath "../data/files/te
into table exim_employee partition (emp_country="us", emp_state="tn");
load data local inpath "../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs -mkdir ../build/ql/test/data/exports/exim_employee/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
dfs -rmr ../build/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -23,7 +23,7 @@ drop table exim_employee;
create database importer;
use importer;
-dfs -mkdir ../build/ql/test/data/tablestore/exim_employee/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore/exim_employee/temp;
dfs -rmr ../build/ql/test/data/tablestore/exim_employee;
create table exim_employee ( emp_id int comment "employee id")
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_18_part_external.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_18_part_external.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_18_part_external.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_18_part_external.q Fri Apr 26 04:59:50 2013
@@ -15,7 +15,7 @@ load data local inpath "../data/files/te
into table exim_employee partition (emp_country="us", emp_state="tn");
load data local inpath "../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs -mkdir ../build/ql/test/data/exports/exim_employee/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
dfs -rmr ../build/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_19_00_part_external_location.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_19_00_part_external_location.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_19_00_part_external_location.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_19_00_part_external_location.q Fri Apr 26 04:59:50 2013
@@ -11,7 +11,7 @@ load data local inpath "../data/files/te
into table exim_employee partition (emp_country="in", emp_state="tn");
load data local inpath "../data/files/test2.dat"
into table exim_employee partition (emp_country="in", emp_state="ka");
-dfs -mkdir ../build/ql/test/data/exports/exim_employee/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
dfs -rmr ../build/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -19,7 +19,7 @@ drop table exim_employee;
create database importer;
use importer;
-dfs -mkdir ../build/ql/test/data/tablestore/exim_employee/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore/exim_employee/temp;
dfs -rmr ../build/ql/test/data/tablestore/exim_employee;
import external table exim_employee
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_19_part_external_location.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_19_part_external_location.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_19_part_external_location.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_19_part_external_location.q Fri Apr 26 04:59:50 2013
@@ -15,7 +15,7 @@ load data local inpath "../data/files/te
into table exim_employee partition (emp_country="us", emp_state="tn");
load data local inpath "../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs -mkdir ../build/ql/test/data/exports/exim_employee/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
dfs -rmr ../build/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -23,7 +23,7 @@ drop table exim_employee;
create database importer;
use importer;
-dfs -mkdir ../build/ql/test/data/tablestore/exim_employee/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore/exim_employee/temp;
dfs -rmr ../build/ql/test/data/tablestore/exim_employee;
import external table exim_employee partition (emp_country="us", emp_state="tn")
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_20_part_managed_location.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_20_part_managed_location.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_20_part_managed_location.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_20_part_managed_location.q Fri Apr 26 04:59:50 2013
@@ -15,7 +15,7 @@ load data local inpath "../data/files/te
into table exim_employee partition (emp_country="us", emp_state="tn");
load data local inpath "../data/files/test.dat"
into table exim_employee partition (emp_country="us", emp_state="ka");
-dfs -mkdir ../build/ql/test/data/exports/exim_employee/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
dfs -rmr ../build/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
@@ -23,7 +23,7 @@ drop table exim_employee;
create database importer;
use importer;
-dfs -mkdir ../build/ql/test/data/tablestore/exim_employee/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/tablestore/exim_employee/temp;
dfs -rmr ../build/ql/test/data/tablestore/exim_employee;
import table exim_employee partition (emp_country="us", emp_state="tn")
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_21_export_authsuccess.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_21_export_authsuccess.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_21_export_authsuccess.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_21_export_authsuccess.q Fri Apr 26 04:59:50 2013
@@ -7,7 +7,7 @@ load data local inpath "../data/files/te
set hive.security.authorization.enabled=true;
grant Select on table exim_department to user hive_test_user;
-dfs -mkdir ../build/ql/test/data/exports/exim_department/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
dfs -rmr ../build/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_22_import_exist_authsuccess.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_22_import_exist_authsuccess.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_22_import_exist_authsuccess.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_22_import_exist_authsuccess.q Fri Apr 26 04:59:50 2013
@@ -4,7 +4,7 @@ set hive.test.mode.nosamplelist=exim_dep
create table exim_department ( dep_id int) stored as textfile;
load data local inpath "../data/files/test.dat" into table exim_department;
-dfs -mkdir ../build/ql/test/data/exports/exim_department/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/temp;
dfs -rmr ../build/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_23_import_part_authsuccess.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_23_import_part_authsuccess.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_23_import_part_authsuccess.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_23_import_part_authsuccess.q Fri Apr 26 04:59:50 2013
@@ -9,7 +9,7 @@ create table exim_employee ( emp_id int
tblproperties("creator"="krishna");
load data local inpath "../data/files/test.dat"
into table exim_employee partition (emp_country="in", emp_state="tn");
-dfs -mkdir ../build/ql/test/data/exports/exim_employee/temp;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_employee/temp;
dfs -rmr ../build/ql/test/data/exports/exim_employee;
export table exim_employee to 'ql/test/data/exports/exim_employee';
drop table exim_employee;
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_24_import_nonexist_authsuccess.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_24_import_nonexist_authsuccess.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_24_import_nonexist_authsuccess.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/exim_24_import_nonexist_authsuccess.q Fri Apr 26 04:59:50 2013
@@ -4,7 +4,7 @@ set hive.test.mode.nosamplelist=exim_dep
create table exim_department ( dep_id int) stored as textfile;
load data local inpath "../data/files/test.dat" into table exim_department;
-dfs -mkdir ../build/ql/test/data/exports/exim_department/test;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/exim_department/test;
dfs -rmr ../build/ql/test/data/exports/exim_department;
export table exim_department to 'ql/test/data/exports/exim_department';
drop table exim_department;
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/groupby_sort_8.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/groupby_sort_8.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/groupby_sort_8.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/groupby_sort_8.q Fri Apr 26 04:59:50 2013
@@ -12,9 +12,16 @@ LOAD DATA LOCAL INPATH '../data/files/T1
INSERT OVERWRITE TABLE T1 PARTITION (ds='1') select key, val from T1 where ds = '1';
-- The plan is not converted to a map-side, since although the sorting columns and grouping
--- columns match, the user is issueing a distinct
+-- columns match, the user is issuing a distinct.
+-- However, after HIVE-4310, partial aggregation is performed on the mapper
EXPLAIN
select count(distinct key) from T1;
select count(distinct key) from T1;
-DROP TABLE T1;
\ No newline at end of file
+set hive.map.groupby.sorted.testmode=true;
+-- In testmode, the plan is not changed
+EXPLAIN
+select count(distinct key) from T1;
+select count(distinct key) from T1;
+
+DROP TABLE T1;
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/insertexternal1.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/insertexternal1.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/insertexternal1.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/insertexternal1.q Fri Apr 26 04:59:50 2013
@@ -2,9 +2,9 @@
create table texternal(key string, val string) partitioned by (insertdate string);
-dfs -mkdir ${system:test.tmp.dir}/texternal/temp;
+dfs ${system:test.dfs.mkdir} ${system:test.tmp.dir}/texternal/temp;
dfs -rmr ${system:test.tmp.dir}/texternal;
-dfs -mkdir ${system:test.tmp.dir}/texternal/2008-01-01;
+dfs ${system:test.dfs.mkdir} ${system:test.tmp.dir}/texternal/2008-01-01;
alter table texternal add partition (insertdate='2008-01-01') location 'pfile://${system:test.tmp.dir}/texternal/2008-01-01';
from src insert overwrite table texternal partition (insertdate='2008-01-01') select *;
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/leadlag.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/leadlag.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/leadlag.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/leadlag.q Fri Apr 26 04:59:50 2013
@@ -43,7 +43,7 @@ from part p1 join part p2 on p1.p_partke
-- 4. testLagInSum
select p_mfgr,p_name, p_size,
-sum(p_size - lag(p_size,1)) over(distribute by p_mfgr sort by p_mfgr ) as deltaSum
+sum(p_size - lag(p_size,1)) over(distribute by p_mfgr sort by p_name ) as deltaSum
from part
window w1 as (rows between 2 preceding and 2 following) ;
@@ -51,7 +51,7 @@ window w1 as (rows between 2 preceding a
select p_mfgr,p_name, p_size,
sum(p_size - lag(p_size,1)) over w1 as deltaSum
from part
-window w1 as (distribute by p_mfgr sort by p_mfgr rows between 2 preceding and 2 following) ;
+window w1 as (distribute by p_mfgr sort by p_name rows between 2 preceding and 2 following) ;
-- 6. testRankInLead
select p_mfgr, p_name, p_size, r1,
@@ -76,7 +76,7 @@ order by p_name
-- 8. testOverNoPartitionMultipleAggregate
select p_name, p_retailprice,
lead(p_retailprice) over() as l1 ,
-lag(p_retailprice) over() as l2
+lag(p_retailprice) over() as l2
from part
-order by p_name;
+where p_retailprice = 1173.15;
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/metadata_export_drop.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/metadata_export_drop.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/metadata_export_drop.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/metadata_export_drop.q Fri Apr 26 04:59:50 2013
@@ -1,5 +1,5 @@
create table tmp_meta_export_listener_drop_test (foo string);
-dfs -mkdir ../build/ql/test/data/exports/HIVE-3427;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/exports/HIVE-3427;
set hive.metastore.pre.event.listeners=org.apache.hadoop.hive.ql.parse.MetaDataExportListener;
set hive.metadata.export.location=../build/ql/test/data/exports/HIVE-3427;
set hive.move.exported.metadata.to.trash=false;
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/multiMapJoin1.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/multiMapJoin1.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/multiMapJoin1.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/multiMapJoin1.q Fri Apr 26 04:59:50 2013
@@ -52,6 +52,8 @@ smallTbl2 on (firstjoin.value1 = smallTb
set hive.auto.convert.join.noconditionaltask=true;
set hive.auto.convert.join.noconditionaltask.size=10000;
+-- Now run a query with two-way join, which should be converted into a
+-- map-join followed by groupby - two MR jobs overall
explain
select count(*) FROM
(select bigTbl.key as key, bigTbl.value as value1,
@@ -69,6 +71,32 @@ select count(*) FROM
JOIN
smallTbl2 on (firstjoin.value1 = smallTbl2.value);
+set hive.optimize.mapjoin.mapreduce=true;
+
+-- Now run a query with two-way join, which should first be converted into a
+-- map-join followed by groupby and then finally into a single MR job.
+
+explain insert overwrite directory '${system:test.tmp.dir}/multiJoin1.output'
+select count(*) FROM
+(select bigTbl.key as key, bigTbl.value as value1,
+ bigTbl.value as value2 FROM bigTbl JOIN smallTbl1
+ on (bigTbl.key = smallTbl1.key)
+) firstjoin
+JOIN
+smallTbl2 on (firstjoin.value1 = smallTbl2.value)
+group by smallTbl2.key;
+
+insert overwrite directory '${system:test.tmp.dir}/multiJoin1.output'
+select count(*) FROM
+(select bigTbl.key as key, bigTbl.value as value1,
+ bigTbl.value as value2 FROM bigTbl JOIN smallTbl1
+ on (bigTbl.key = smallTbl1.key)
+) firstjoin
+JOIN
+smallTbl2 on (firstjoin.value1 = smallTbl2.value)
+group by smallTbl2.key;
+set hive.optimize.mapjoin.mapreduce=false;
+
create table smallTbl3(key string, value string);
insert overwrite table smallTbl3 select * from src where key < 10;
@@ -101,6 +129,21 @@ select * from
set hive.auto.convert.join.noconditionaltask=false;
+explain
+select count(*) FROM
+ (
+ SELECT firstjoin.key1 as key1, firstjoin.key2 as key2, smallTbl2.key as key3,
+ firstjoin.value1 as value1, firstjoin.value2 as value2 FROM
+ (SELECT bigTbl.key1 as key1, bigTbl.key2 as key2,
+ bigTbl.value as value1, bigTbl.value as value2
+ FROM bigTbl JOIN smallTbl1
+ on (bigTbl.key1 = smallTbl1.key)
+ ) firstjoin
+ JOIN
+ smallTbl2 on (firstjoin.value1 = smallTbl2.value)
+ ) secondjoin
+ JOIN smallTbl3 on (secondjoin.key2 = smallTbl3.key);
+
select count(*) FROM
(
SELECT firstjoin.key1 as key1, firstjoin.key2 as key2, smallTbl2.key as key3,
@@ -118,7 +161,8 @@ select count(*) FROM
set hive.auto.convert.join.noconditionaltask=true;
set hive.auto.convert.join.noconditionaltask.size=10000;
--- join with 4 tables on different keys is also executed as a single MR job
+-- join with 4 tables on different keys is also executed as a single MR job,
+-- So, overall two jobs - one for multi-way join and one for count(*)
explain
select count(*) FROM
(
@@ -147,3 +191,37 @@ select count(*) FROM
smallTbl2 on (firstjoin.value1 = smallTbl2.value)
) secondjoin
JOIN smallTbl3 on (secondjoin.key2 = smallTbl3.key);
+
+set hive.optimize.mapjoin.mapreduce=true;
+-- Now run the above query with M-MR optimization
+-- This should be a single MR job end-to-end.
+explain
+select count(*) FROM
+ (
+ SELECT firstjoin.key1 as key1, firstjoin.key2 as key2, smallTbl2.key as key3,
+ firstjoin.value1 as value1, firstjoin.value2 as value2 FROM
+ (SELECT bigTbl.key1 as key1, bigTbl.key2 as key2,
+ bigTbl.value as value1, bigTbl.value as value2
+ FROM bigTbl JOIN smallTbl1
+ on (bigTbl.key1 = smallTbl1.key)
+ ) firstjoin
+ JOIN
+ smallTbl2 on (firstjoin.value1 = smallTbl2.value)
+ ) secondjoin
+ JOIN smallTbl3 on (secondjoin.key2 = smallTbl3.key);
+
+select count(*) FROM
+ (
+ SELECT firstjoin.key1 as key1, firstjoin.key2 as key2, smallTbl2.key as key3,
+ firstjoin.value1 as value1, firstjoin.value2 as value2 FROM
+ (SELECT bigTbl.key1 as key1, bigTbl.key2 as key2,
+ bigTbl.value as value1, bigTbl.value as value2
+ FROM bigTbl JOIN smallTbl1
+ on (bigTbl.key1 = smallTbl1.key)
+ ) firstjoin
+ JOIN
+ smallTbl2 on (firstjoin.value1 = smallTbl2.value)
+ ) secondjoin
+ JOIN smallTbl3 on (secondjoin.key2 = smallTbl3.key);
+
+set hive.optimize.mapjoin.mapreduce=false;
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/multi_insert.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/multi_insert.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/multi_insert.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/multi_insert.q Fri Apr 26 04:59:50 2013
@@ -196,7 +196,7 @@ select * from src_multi2 order by key, v
set hive.merge.mapfiles=false;
set hive.merge.mapredfiles=false;
-dfs -mkdir ${system:test.tmp.dir}/hive_test/multiins_local/temp;
+dfs ${system:test.dfs.mkdir} ${system:test.tmp.dir}/hive_test/multiins_local/temp;
dfs -rmr ${system:test.tmp.dir}/hive_test/multiins_local;
explain
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/multi_insert_move_tasks_share_dependencies.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/multi_insert_move_tasks_share_dependencies.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/multi_insert_move_tasks_share_dependencies.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/multi_insert_move_tasks_share_dependencies.q Fri Apr 26 04:59:50 2013
@@ -196,7 +196,7 @@ select * from src_multi2 order by key, v
set hive.merge.mapfiles=false;
set hive.merge.mapredfiles=false;
-dfs -mkdir ${system:test.tmp.dir}/hive_test/multiins_local/temp;
+dfs ${system:test.dfs.mkdir} ${system:test.tmp.dir}/hive_test/multiins_local/temp;
dfs -rmr ${system:test.tmp.dir}/hive_test/multiins_local;
explain
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/ptf.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/ptf.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/ptf.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/ptf.q Fri Apr 26 04:59:50 2013
@@ -182,7 +182,6 @@ CREATE TABLE part_5(
p_mfgr STRING,
p_name STRING,
p_size INT,
-s1 INT,
s2 INT,
r INT,
dr INT,
@@ -197,8 +196,7 @@ rank() over (distribute by p_mfgr sort b
dense_rank() over (distribute by p_mfgr sort by p_name) as dr,
sum(p_retailprice) over (distribute by p_mfgr sort by p_name rows between unbounded preceding and current row) as s
INSERT OVERWRITE TABLE part_5 select p_mfgr,p_name, p_size,
-sum(p_size) over (distribute by p_mfgr sort by p_mfgr, p_name rows between unbounded preceding and current row) as s1,
-sum(p_size) over (distribute by p_mfgr sort by p_size range between 5 preceding and current row) as s2,
+round(sum(p_size),1) over (distribute by p_mfgr sort by p_size range between 5 preceding and current row) as s2,
rank() over (distribute by p_mfgr sort by p_mfgr, p_name) as r,
dense_rank() over (distribute by p_mfgr sort by p_mfgr, p_name) as dr,
cume_dist() over (distribute by p_mfgr sort by p_mfgr, p_name) as cud,
@@ -294,11 +292,11 @@ from noop(on
-- 23. testMultiOperatorChainWithDiffPartitionForWindow2
select p_mfgr, p_name,
-rank() over (partition by p_mfgr order by p_mfgr) as r,
-dense_rank() over (partition by p_mfgr order by p_mfgr) as dr,
+rank() over (partition by p_mfgr order by p_name) as r,
+dense_rank() over (partition by p_mfgr order by p_name) as dr,
p_size,
-sum(p_size) over (partition by p_mfgr order by p_mfgr rows between unbounded preceding and current row) as s1,
-sum(p_size) over (partition by p_mfgr order by p_mfgr rows between unbounded preceding and current row) as s2
+sum(p_size) over (partition by p_mfgr order by p_name range between unbounded preceding and current row) as s1,
+sum(p_size) over (partition by p_mfgr order by p_name range between unbounded preceding and current row) as s2
from noopwithmap(on
noop(on
noop(on part
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/ptf_general_queries.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/ptf_general_queries.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/ptf_general_queries.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/ptf_general_queries.q Fri Apr 26 04:59:50 2013
@@ -23,10 +23,10 @@ sort by p_name ;
-- 2. testUDAFsNoWindowingNoPTFNoGBY
select p_mfgr,p_name, p_retailprice,
-sum(p_retailprice) over(partition by p_mfgr order by p_mfgr) as s,
-min(p_retailprice) over(partition by p_mfgr order by p_mfgr) as mi,
-max(p_retailprice) over(partition by p_mfgr order by p_mfgr) as ma,
-avg(p_retailprice) over(partition by p_mfgr order by p_mfgr) as av
+sum(p_retailprice) over(partition by p_mfgr order by p_name) as s,
+min(p_retailprice) over(partition by p_mfgr order by p_name) as mi,
+max(p_retailprice) over(partition by p_mfgr order by p_name) as ma,
+avg(p_retailprice) over(partition by p_mfgr order by p_name) as av
from part
;
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/rename_external_partition_location.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/rename_external_partition_location.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/rename_external_partition_location.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/rename_external_partition_location.q Fri Apr 26 04:59:50 2013
@@ -1,5 +1,5 @@
-dfs -mkdir ${system:test.tmp.dir}/ex_table;
+dfs ${system:test.dfs.mkdir} ${system:test.tmp.dir}/ex_table;
CREATE EXTERNAL TABLE ex_table ( key INT, value STRING)
PARTITIONED BY (part STRING)
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/repair.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/repair.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/repair.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/repair.q Fri Apr 26 04:59:50 2013
@@ -2,8 +2,8 @@ CREATE TABLE repairtable(col STRING) PAR
MSCK TABLE repairtable;
-dfs -mkdir ../build/ql/test/data/warehouse/repairtable/p1=a/p2=a;
-dfs -mkdir ../build/ql/test/data/warehouse/repairtable/p1=b/p2=a;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/warehouse/repairtable/p1=a/p2=a;
+dfs ${system:test.dfs.mkdir} ../build/ql/test/data/warehouse/repairtable/p1=b/p2=a;
dfs -touchz ../build/ql/test/data/warehouse/repairtable/p1=b/p2=a/datafile;
MSCK TABLE repairtable;
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/schemeAuthority.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/schemeAuthority.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/schemeAuthority.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/schemeAuthority.q Fri Apr 26 04:59:50 2013
@@ -1,5 +1,5 @@
-dfs -mkdir file:///tmp/test;
-dfs -mkdir hdfs:///tmp/test;
+dfs ${system:test.dfs.mkdir} file:///tmp/test;
+dfs ${system:test.dfs.mkdir} hdfs:///tmp/test;
create external table dynPart (key string) partitioned by (value string) row format delimited fields terminated by '\\t' stored as textfile;
insert overwrite local directory "/tmp/test" select key from src where (key = 10) order by key;
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/semijoin.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/semijoin.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/semijoin.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/semijoin.q Fri Apr 26 04:59:50 2013
@@ -77,7 +77,5 @@ select a.key from t3 a left semi join t1
explain select a.key from t3 a left semi join t2 b on a.key = b.key left outer join t1 c on a.value = c.value sort by a.key;
select a.key from t3 a left semi join t2 b on a.key = b.key left outer join t1 c on a.value = c.value sort by a.key;
-
-
-
-
+explain select a.key from t3 a left semi join t2 b on a.value = b.value where a.key > 100;
+select a.key from t3 a left semi join t2 b on a.value = b.value where a.key > 100;
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/stats_noscan_2.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/stats_noscan_2.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/stats_noscan_2.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/stats_noscan_2.q Fri Apr 26 04:59:50 2013
@@ -12,7 +12,7 @@ drop table anaylyze_external;
-- 2 test partition
-- prepare data
create table texternal(key string, val string) partitioned by (insertdate string);
-dfs -mkdir ${system:test.tmp.dir}/texternal/2008-01-01;
+dfs ${system:test.dfs.mkdir} ${system:test.tmp.dir}/texternal/2008-01-01;
alter table texternal add partition (insertdate='2008-01-01') location 'pfile://${system:test.tmp.dir}/texternal/2008-01-01';
from src insert overwrite table texternal partition (insertdate='2008-01-01') select *;
select count(*) from texternal where insertdate='2008-01-01';
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/udtf_explode.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/udtf_explode.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/udtf_explode.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/udtf_explode.q Fri Apr 26 04:59:50 2013
@@ -15,4 +15,11 @@ SELECT explode(map(1,'one',2,'two',3,'th
SELECT a.myKey, a.myVal, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) as (myKey,myVal) FROM src LIMIT 3) a GROUP BY a.myKey, a.myVal;
SELECT src.key, myCol FROM src lateral view explode(array(1,2,3)) x AS myCol LIMIT 3;
-SELECT src.key, myKey, myVal FROM src lateral view explode(map(1,'one',2,'two',3,'three')) x AS myKey,myVal LIMIT 3;
\ No newline at end of file
+SELECT src.key, myKey, myVal FROM src lateral view explode(map(1,'one',2,'two',3,'three')) x AS myKey,myVal LIMIT 3;
+
+-- HIVE-4295
+SELECT BLOCK__OFFSET__INSIDE__FILE, src.key, myKey, myVal FROM src lateral view explode(map(1,'one',2,'two',3,'three')) x AS myKey,myVal LIMIT 3;
+
+set hive.optimize.cp=false;
+SELECT src.key, myKey, myVal FROM src lateral view explode(map(1,'one',2,'two',3,'three')) x AS myKey,myVal LIMIT 3;
+SELECT BLOCK__OFFSET__INSIDE__FILE, src.key, myKey, myVal FROM src lateral view explode(map(1,'one',2,'two',3,'three')) x AS myKey,myVal LIMIT 3;
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing.q Fri Apr 26 04:59:50 2013
@@ -106,57 +106,57 @@ from part
-- 11. testFirstLast
select p_mfgr,p_name, p_size,
-sum(p_size) over (distribute by p_mfgr sort by p_mfgr rows between current row and current row) as s2,
+sum(p_size) over (distribute by p_mfgr sort by p_name rows between current row and current row) as s2,
first_value(p_size) over w1 as f,
last_value(p_size, false) over w1 as l
from part
-window w1 as (distribute by p_mfgr sort by p_mfgr rows between 2 preceding and 2 following);
+window w1 as (distribute by p_mfgr sort by p_name rows between 2 preceding and 2 following);
-- 12. testFirstLastWithWhere
select p_mfgr,p_name, p_size,
-rank() over(distribute by p_mfgr sort by p_mfgr) as r,
-sum(p_size) over (distribute by p_mfgr sort by p_mfgr rows between current row and current row) as s2,
+rank() over(distribute by p_mfgr sort by p_name) as r,
+sum(p_size) over (distribute by p_mfgr sort by p_name rows between current row and current row) as s2,
first_value(p_size) over w1 as f,
last_value(p_size, false) over w1 as l
from part
where p_mfgr = 'Manufacturer#3'
-window w1 as (distribute by p_mfgr sort by p_mfgr rows between 2 preceding and 2 following);
+window w1 as (distribute by p_mfgr sort by p_name rows between 2 preceding and 2 following);
-- 13. testSumWindow
select p_mfgr,p_name, p_size,
sum(p_size) over w1 as s1,
-sum(p_size) over (distribute by p_mfgr sort by p_mfgr rows between current row and current row) as s2
+sum(p_size) over (distribute by p_mfgr sort by p_name rows between current row and current row) as s2
from part
-window w1 as (distribute by p_mfgr sort by p_mfgr rows between 2 preceding and 2 following);
+window w1 as (distribute by p_mfgr sort by p_name rows between 2 preceding and 2 following);
-- 14. testNoSortClause
select p_mfgr,p_name, p_size,
-rank() over(distribute by p_mfgr) as r, dense_rank() over(distribute by p_mfgr) as dr
+rank() over(distribute by p_mfgr sort by p_name) as r, dense_rank() over(distribute by p_mfgr sort by p_name) as dr
from part
-window w1 as (distribute by p_mfgr rows between 2 preceding and 2 following);
+window w1 as (distribute by p_mfgr sort by p_name rows between 2 preceding and 2 following);
-- 15. testExpressions
select p_mfgr,p_name, p_size,
-rank() over(distribute by p_mfgr sort by p_mfgr) as r,
-dense_rank() over(distribute by p_mfgr sort by p_mfgr) as dr,
-cume_dist() over(distribute by p_mfgr sort by p_mfgr) as cud,
-percent_rank() over(distribute by p_mfgr sort by p_mfgr) as pr,
-ntile(3) over(distribute by p_mfgr sort by p_mfgr) as nt,
-count(p_size) over(distribute by p_mfgr sort by p_mfgr) as ca,
-avg(p_size) over(distribute by p_mfgr sort by p_mfgr) as avg,
-stddev(p_size) over(distribute by p_mfgr sort by p_mfgr) as st,
-first_value(p_size % 5) over(distribute by p_mfgr sort by p_mfgr) as fv,
-last_value(p_size) over(distribute by p_mfgr sort by p_mfgr) as lv,
+rank() over(distribute by p_mfgr sort by p_name) as r,
+dense_rank() over(distribute by p_mfgr sort by p_name) as dr,
+cume_dist() over(distribute by p_mfgr sort by p_name) as cud,
+percent_rank() over(distribute by p_mfgr sort by p_name) as pr,
+ntile(3) over(distribute by p_mfgr sort by p_name) as nt,
+count(p_size) over(distribute by p_mfgr sort by p_name) as ca,
+avg(p_size) over(distribute by p_mfgr sort by p_name) as avg,
+stddev(p_size) over(distribute by p_mfgr sort by p_name) as st,
+first_value(p_size % 5) over(distribute by p_mfgr sort by p_name) as fv,
+last_value(p_size) over(distribute by p_mfgr sort by p_name) as lv,
first_value(p_size) over w1 as fvW1
from part
window w1 as (distribute by p_mfgr sort by p_mfgr, p_name rows between 2 preceding and 2 following);
-- 16. testMultipleWindows
select p_mfgr,p_name, p_size,
- rank() over(distribute by p_mfgr sort by p_mfgr) as r,
- dense_rank() over(distribute by p_mfgr sort by p_mfgr) as dr,
-cume_dist() over(distribute by p_mfgr sort by p_mfgr) as cud,
-sum(p_size) over (distribute by p_mfgr sort by p_mfgr, p_name rows between unbounded preceding and current row) as s1,
+ rank() over(distribute by p_mfgr sort by p_name) as r,
+ dense_rank() over(distribute by p_mfgr sort by p_name) as dr,
+cume_dist() over(distribute by p_mfgr sort by p_name) as cud,
+sum(p_size) over (distribute by p_mfgr sort by p_name range between unbounded preceding and current row) as s1,
sum(p_size) over (distribute by p_mfgr sort by p_size range between 5 preceding and current row) as s2,
first_value(p_size) over w1 as fv1
from part
@@ -164,8 +164,8 @@ window w1 as (distribute by p_mfgr sort
-- 17. testCountStar
select p_mfgr,p_name, p_size,
-count(*) over(distribute by p_mfgr sort by p_mfgr ) as c,
-count(p_size) over(distribute by p_mfgr sort by p_mfgr) as ca,
+count(*) over(distribute by p_mfgr sort by p_name ) as c,
+count(p_size) over(distribute by p_mfgr sort by p_name) as ca,
first_value(p_size) over w1 as fvW1
from part
window w1 as (distribute by p_mfgr sort by p_mfgr, p_name rows between 2 preceding and 2 following);
@@ -204,28 +204,37 @@ window w1 as (distribute by p_mfgr sort
select p_mfgr,p_name, p_size,
histogram_numeric(p_retailprice, 5) over w1 as hist,
percentile(p_partkey, 0.5) over w1 as per,
-row_number() over(distribute by p_mfgr sort by p_mfgr) as rn
+row_number() over(distribute by p_mfgr sort by p_name) as rn
from part
window w1 as (distribute by p_mfgr sort by p_mfgr, p_name rows between 2 preceding and 2 following);
-- 22. testViewAsTableInputWithWindowing
create view IF NOT EXISTS mfgr_price_view as
select p_mfgr, p_brand,
-sum(p_retailprice) as s
+round(sum(p_retailprice),2) as s
from part
group by p_mfgr, p_brand;
+select *
+from (
+select p_mfgr, p_brand, s,
+round(sum(s),2) over w1 as s1
+from mfgr_price_view
+window w1 as (distribute by p_mfgr sort by p_mfgr )
+) sq
+order by p_mfgr, p_brand;
+
select p_mfgr, p_brand, s,
-sum(s) over w1 as s1
+round(sum(s),2) over w1 as s1
from mfgr_price_view
-window w1 as (distribute by p_mfgr sort by p_mfgr rows between 2 preceding and current row);
+window w1 as (distribute by p_mfgr sort by p_brand rows between 2 preceding and current row);
-- 23. testCreateViewWithWindowingQuery
create view IF NOT EXISTS mfgr_brand_price_view as
select p_mfgr, p_brand,
sum(p_retailprice) over w1 as s
from part
-window w1 as (distribute by p_mfgr sort by p_mfgr rows between 2 preceding and current row);
+window w1 as (distribute by p_mfgr sort by p_name rows between 2 preceding and current row);
select * from mfgr_brand_price_view;
@@ -234,7 +243,7 @@ select p_mfgr, p_name,
lv_col, p_size, sum(p_size) over w1 as s
from (select p_mfgr, p_name, p_size, array(1,2,3) arr from part) p
lateral view explode(arr) part_lv as lv_col
-window w1 as (distribute by p_mfgr sort by p_name rows between 2 preceding and current row);
+window w1 as (distribute by p_mfgr sort by p_size, lv_col rows between 2 preceding and current row);
-- 25. testMultipleInserts3SWQs
CREATE TABLE part_1(
@@ -251,8 +260,7 @@ p_name STRING,
p_size INT,
r INT,
dr INT,
-cud INT,
-s1 DOUBLE,
+cud INT,
s2 DOUBLE,
fv1 INT);
@@ -272,17 +280,16 @@ dense_rank() over(distribute by p_mfgr s
sum(p_retailprice) over (distribute by p_mfgr sort by p_name rows between unbounded preceding and current row) as s
INSERT OVERWRITE TABLE part_2
select p_mfgr,p_name, p_size,
-rank() over(distribute by p_mfgr sort by p_mfgr) as r,
-dense_rank() over(distribute by p_mfgr sort by p_mfgr) as dr,
-cume_dist() over(distribute by p_mfgr sort by p_mfgr) as cud,
-sum(p_size) over (distribute by p_mfgr sort by p_mfgr, p_name rows between unbounded preceding and current row) as s1,
-sum(p_size) over (distribute by p_mfgr sort by p_size range between 5 preceding and current row) as s2,
+rank() over(distribute by p_mfgr sort by p_name) as r,
+dense_rank() over(distribute by p_mfgr sort by p_name) as dr,
+cume_dist() over(distribute by p_mfgr sort by p_name) as cud,
+round(sum(p_size),1) over (distribute by p_mfgr sort by p_size range between 5 preceding and current row) as s2,
first_value(p_size) over w1 as fv1
window w1 as (distribute by p_mfgr sort by p_mfgr, p_name rows between 2 preceding and 2 following)
INSERT OVERWRITE TABLE part_3
select p_mfgr,p_name, p_size,
-count(*) over(distribute by p_mfgr sort by p_mfgr) as c,
-count(p_size) over(distribute by p_mfgr sort by p_mfgr) as ca,
+count(*) over(distribute by p_mfgr sort by p_name) as c,
+count(p_size) over(distribute by p_mfgr sort by p_name) as ca,
first_value(p_size) over w1 as fv
window w1 as (distribute by p_mfgr sort by p_mfgr, p_name rows between 2 preceding and 2 following);
@@ -333,7 +340,7 @@ select p_mfgr, p_name, p_size,
sum(p_size) over w1 as s1,
sum(p_size) over w2 as s2
from part
-window w1 as (partition by p_mfgr order by p_mfgr rows between 2 preceding and 2 following),
+window w1 as (partition by p_mfgr order by p_name range between 2 preceding and 2 following),
w2 as w1;
@@ -342,7 +349,7 @@ select p_mfgr, p_name, p_size,
sum(p_size) over w1 as s1,
sum(p_size) over w2 as s2
from part
-window w1 as (partition by p_mfgr order by p_mfgr rows between 2 preceding and 2 following),
+window w1 as (partition by p_mfgr order by p_name range between 2 preceding and 2 following),
w2 as (w1 rows between unbounded preceding and current row);
@@ -352,9 +359,9 @@ sum(p_size) over w1 as s1,
sum(p_size) over w2 as s2,
sum(p_size) over w3 as s3
from part
-window w1 as (distribute by p_mfgr sort by p_mfgr rows between 2 preceding and 2 following),
+window w1 as (distribute by p_mfgr sort by p_name range between 2 preceding and 2 following),
w2 as w3,
- w3 as (distribute by p_mfgr sort by p_mfgr rows between unbounded preceding and current row);
+ w3 as (distribute by p_mfgr sort by p_name range between unbounded preceding and current row);
-- 34. testWindowDefinitionPropagation
@@ -363,9 +370,9 @@ sum(p_size) over w1 as s1,
sum(p_size) over w2 as s2,
sum(p_size) over (w3 rows between 2 preceding and 2 following) as s3
from part
-window w1 as (distribute by p_mfgr sort by p_mfgr rows between 2 preceding and 2 following),
+window w1 as (distribute by p_mfgr sort by p_name range between 2 preceding and 2 following),
w2 as w3,
- w3 as (distribute by p_mfgr sort by p_mfgr rows between unbounded preceding and current row);
+ w3 as (distribute by p_mfgr sort by p_name range between unbounded preceding and current row);
-- 35. testDistinctWithWindowing
select DISTINCT p_mfgr, p_name, p_size,
@@ -379,11 +386,11 @@ rank() over (partition by p_mfgr order b
from part;
-- 37. testPartitioningVariousForms
-select p_mfgr, p_name, p_size,
-sum(p_retailprice) over (partition by p_mfgr order by p_mfgr) as s1,
+select p_mfgr,
+round(sum(p_retailprice),2) over (partition by p_mfgr order by p_mfgr) as s1,
min(p_retailprice) over (partition by p_mfgr) as s2,
max(p_retailprice) over (distribute by p_mfgr sort by p_mfgr) as s3,
-avg(p_retailprice) over (distribute by p_mfgr) as s4,
+round(avg(p_retailprice),2) over (distribute by p_mfgr) as s4,
count(p_retailprice) over (cluster by p_mfgr ) as s5
from part;
@@ -421,7 +428,7 @@ select p_mfgr, p_name, p_size,
-- 44. testOverNoPartitionSingleAggregate
select p_name, p_retailprice,
-avg(p_retailprice) over()
+round(avg(p_retailprice),2) over()
from part
order by p_name;
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_expressions.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_expressions.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_expressions.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_expressions.q Fri Apr 26 04:59:50 2013
@@ -35,7 +35,7 @@ create table over10k(
load data local inpath '../data/files/over10k' into table over10k;
select p_mfgr, p_retailprice, p_size,
-round(sum(p_retailprice),2) = round((sum(lag(p_retailprice,1)) - first_value(p_retailprice)) + last_value(p_retailprice),2)
+round(sum(p_retailprice),2) = round(sum(lag(p_retailprice,1,0.0)) + last_value(p_retailprice),2)
over(distribute by p_mfgr sort by p_retailprice),
max(p_retailprice) - min(p_retailprice) = last_value(p_retailprice) - first_value(p_retailprice)
over(distribute by p_mfgr sort by p_retailprice)
@@ -49,9 +49,9 @@ sum(p_retailprice) - 5 over (distribute
from part
;
-select s, si, f, si - lead(f, 3) over (partition by t order by bo desc) from over10k limit 100;
-select s, i, i - lead(i, 3, 0) over (partition by si order by i) from over10k limit 100;
-select s, si, d, si - lag(d, 3) over (partition by b order by si) from over10k limit 100;
+select s, si, f, si - lead(f, 3) over (partition by t order by bo,s,si,f desc) from over10k limit 100;
+select s, i, i - lead(i, 3, 0) over (partition by si order by i,s) from over10k limit 100;
+select s, si, d, si - lag(d, 3) over (partition by b order by si,s,d) from over10k limit 100;
select s, lag(s, 3, 'fred') over (partition by f order by b) from over10k limit 100;
select p_mfgr, avg(p_retailprice) over(partition by p_mfgr, p_type order by p_mfgr) from part;
@@ -61,6 +61,12 @@ select p_mfgr, avg(p_retailprice) over(p
-- multi table insert test
create table t1 (a1 int, b1 string);
create table t2 (a1 int, b1 string);
-from (select sum(i) over (), s from over10k) tt insert overwrite table t1 select * insert overwrite table t2 select * ;
+from (select sum(i) over (partition by ts order by i), s from over10k) tt insert overwrite table t1 select * insert overwrite table t2 select * ;
select * from t1 limit 3;
select * from t2 limit 3;
+
+select p_mfgr, p_retailprice, p_size,
+round(sum(p_retailprice),2) + 50.0 = round(sum(lag(p_retailprice,1,50.0)) + last_value(p_retailprice),2)
+ over(distribute by p_mfgr sort by p_retailprice)
+from part
+limit 11;
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_multipartitioning.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_multipartitioning.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_multipartitioning.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_multipartitioning.q Fri Apr 26 04:59:50 2013
@@ -30,7 +30,7 @@ select s, sum(i) over (partition by s),
select s, rank() over (partition by s order by bo), rank() over (partition by si order by bin desc) from over10k
where s = 'tom allen' or s = 'bob steinbeck';
-select s, sum(f) over (partition by i), row_number() over () from over10k where s = 'tom allen' or s = 'bob steinbeck';
+select s, sum(f) over (partition by i), row_number() over (order by f) from over10k where s = 'tom allen' or s = 'bob steinbeck';
select s, rank() over w1,
rank() over w2
@@ -39,4 +39,4 @@ where s = 'tom allen' or s = 'bob steinb
window
w1 as (partition by s order by dec),
w2 as (partition by si order by f)
-;
\ No newline at end of file
+;
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_navfn.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_navfn.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_navfn.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_navfn.q Fri Apr 26 04:59:50 2013
@@ -19,9 +19,9 @@ load data local inpath '../data/files/ov
select s, row_number() over (partition by d order by dec) from over10k limit 100;
-select i, lead(s) over (partition by bin order by d desc) from over10k limit 100;
+select i, lead(s) over (partition by bin order by d,i desc) from over10k limit 100;
-select i, lag(dec) over (partition by i order by s) from over10k limit 100;
+select i, lag(dec) over (partition by i order by s,i,dec) from over10k limit 100;
select s, last_value(t) over (partition by d order by f) from over10k limit 100;
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_ntile.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_ntile.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_ntile.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_ntile.q Fri Apr 26 04:59:50 2013
@@ -17,12 +17,12 @@ create table over10k(
load data local inpath '../data/files/over10k' into table over10k;
-select i, ntile(10) over (partition by s) from over10k limit 100;
+select i, ntile(10) over (partition by s order by i) from over10k limit 100;
-select s, ntile(100) over (partition by i) from over10k limit 100;
+select s, ntile(100) over (partition by i order by s) from over10k limit 100;
-select f, ntile(4) over (partition by d) from over10k limit 100;
+select f, ntile(4) over (partition by d order by f) from over10k limit 100;
-select d, ntile(1000) over (partition by dec) from over10k limit 100;
+select d, ntile(1000) over (partition by dec order by d) from over10k limit 100;
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_rank.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_rank.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_rank.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_rank.q Fri Apr 26 04:59:50 2013
@@ -19,9 +19,9 @@ load data local inpath '../data/files/ov
select s, rank() over (partition by f order by t) from over10k limit 100;
-select s, dense_rank() over (partition by ts order by i desc) from over10k limit 100;
+select s, dense_rank() over (partition by ts order by i,s desc) from over10k limit 100;
-select s, cume_dist() over (partition by bo order by b) from over10k limit 100;
+select s, cume_dist() over (partition by bo order by b,s) from over10k limit 100;
select s, percent_rank() over (partition by dec order by f) from over10k limit 100;
@@ -30,7 +30,7 @@ select s, percent_rank() over (partition
select ts, dec, rnk
from
(select ts, dec,
- rank() over (partition by ts) as rnk
+ rank() over (partition by ts order by dec) as rnk
from
(select other.ts, other.dec
from over10k other
@@ -54,7 +54,7 @@ where dec = 89.5 limit 10;
select ts, dec, rnk
from
(select ts, dec,
- rank() over (partition by ts) as rnk
+ rank() over (partition by ts order by dec) as rnk
from
(select other.ts, other.dec
from over10k other
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_udaf.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_udaf.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_udaf.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_udaf.q Fri Apr 26 04:59:50 2013
@@ -19,9 +19,9 @@ load data local inpath '../data/files/ov
select s, min(i) over (partition by s) from over10k limit 100;
-select s, avg(f) over (partition by si order by t) from over10k limit 100;
+select s, avg(f) over (partition by si order by s) from over10k limit 100;
-select s, avg(i) over (partition by t, b) from over10k limit 100;
+select s, avg(i) over (partition by t, b order by s) from over10k limit 100;
select max(i) over w from over10k window w as (partition by f) limit 100;
Modified: hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_windowspec.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_windowspec.q?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_windowspec.q (original)
+++ hive/branches/HIVE-4115/ql/src/test/queries/clientpositive/windowing_windowspec.q Fri Apr 26 04:59:50 2013
@@ -17,18 +17,18 @@ create table over10k(
load data local inpath '../data/files/over10k' into table over10k;
-select s, sum(b) over (partition by i order by si rows unbounded preceding) from over10k limit 100;
+select s, sum(b) over (partition by i order by s,b rows unbounded preceding) from over10k limit 100;
-select s, sum(f) over (partition by d order by i rows unbounded preceding) from over10k limit 100;
+select s, sum(f) over (partition by d order by s,f rows unbounded preceding) from over10k limit 100;
-select s, sum(f) over (partition by ts order by b range between current row and unbounded following) from over10k limit 100;
+select s, sum(f) over (partition by ts order by f range between current row and unbounded following) from over10k limit 100;
-select s, avg(f) over (partition by bin order by s rows between current row and 5 following) from over10k limit 100;
+select s, avg(f) over (partition by ts order by s,f rows between current row and 5 following) from over10k limit 100;
-select s, avg(d) over (partition by t order by ts desc rows between 5 preceding and 5 following) from over10k limit 100;
+select s, avg(d) over (partition by t order by s,d desc rows between 5 preceding and 5 following) from over10k limit 100;
-select s, sum(i) over() from over10k limit 100;
+select s, sum(i) over(partition by ts order by s) from over10k limit 100;
-select f, sum(f) over (order by f range between unbounded preceding and current row) from over10k limit 100;
+select f, sum(f) over (partition by ts order by f range between unbounded preceding and current row) from over10k limit 100;
Modified: hive/branches/HIVE-4115/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out Fri Apr 26 04:59:50 2013
@@ -61,5 +61,5 @@ WHERE key=86
View Expanded Text: SELECT `src`.`key`, `src`.`value`
FROM `default`.`src`
WHERE `src`.`key`=86
-FAILED: SemanticException [Error 10217]: Cannot replace a view with CREATE VIEW or REPLACE VIEW or ALTER VIEW AS SELECT if the view has paritions
+FAILED: SemanticException [Error 10217]: Cannot replace a view with CREATE VIEW or REPLACE VIEW or ALTER VIEW AS SELECT if the view has partitions
The following view has partition, it could not be replaced: testViewPart
Modified: hive/branches/HIVE-4115/ql/src/test/results/clientnegative/create_or_replace_view1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientnegative/create_or_replace_view1.q.out?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientnegative/create_or_replace_view1.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientnegative/create_or_replace_view1.q.out Fri Apr 26 04:59:50 2013
@@ -20,5 +20,5 @@ POSTHOOK: type: ALTERTABLE_ADDPARTS
POSTHOOK: Input: default@srcpart
POSTHOOK: Input: default@v
POSTHOOK: Output: default@v@ds=1/hr=2
-FAILED: SemanticException [Error 10217]: Cannot replace a view with CREATE VIEW or REPLACE VIEW or ALTER VIEW AS SELECT if the view has paritions
+FAILED: SemanticException [Error 10217]: Cannot replace a view with CREATE VIEW or REPLACE VIEW or ALTER VIEW AS SELECT if the view has partitions
The following view has partition, it could not be replaced: v
Modified: hive/branches/HIVE-4115/ql/src/test/results/clientnegative/create_or_replace_view2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientnegative/create_or_replace_view2.q.out?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientnegative/create_or_replace_view2.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientnegative/create_or_replace_view2.q.out Fri Apr 26 04:59:50 2013
@@ -20,5 +20,5 @@ POSTHOOK: type: ALTERTABLE_ADDPARTS
POSTHOOK: Input: default@srcpart
POSTHOOK: Input: default@v
POSTHOOK: Output: default@v@ds=1/hr=2
-FAILED: SemanticException [Error 10217]: Cannot replace a view with CREATE VIEW or REPLACE VIEW or ALTER VIEW AS SELECT if the view has paritions
+FAILED: SemanticException [Error 10217]: Cannot replace a view with CREATE VIEW or REPLACE VIEW or ALTER VIEW AS SELECT if the view has partitions
The following view has partition, it could not be replaced: v
Modified: hive/branches/HIVE-4115/ql/src/test/results/clientnegative/udf_assert_true.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientnegative/udf_assert_true.q.out?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientnegative/udf_assert_true.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientnegative/udf_assert_true.q.out Fri Apr 26 04:59:50 2013
@@ -24,10 +24,10 @@ STAGE PLANS:
Lateral View Forward
Select Operator
Lateral View Join Operator
- outputColumnNames: _col2
+ outputColumnNames: _col4
Select Operator
expressions:
- expr: assert_true((_col2 > 0))
+ expr: assert_true((_col4 > 0))
type: void
outputColumnNames: _col0
Limit
@@ -45,10 +45,10 @@ STAGE PLANS:
UDTF Operator
function name: explode
Lateral View Join Operator
- outputColumnNames: _col2
+ outputColumnNames: _col4
Select Operator
expressions:
- expr: assert_true((_col2 > 0))
+ expr: assert_true((_col4 > 0))
type: void
outputColumnNames: _col0
Limit
@@ -95,10 +95,10 @@ STAGE PLANS:
Lateral View Forward
Select Operator
Lateral View Join Operator
- outputColumnNames: _col2
+ outputColumnNames: _col4
Select Operator
expressions:
- expr: assert_true((_col2 < 2))
+ expr: assert_true((_col4 < 2))
type: void
outputColumnNames: _col0
Limit
@@ -116,10 +116,10 @@ STAGE PLANS:
UDTF Operator
function name: explode
Lateral View Join Operator
- outputColumnNames: _col2
+ outputColumnNames: _col4
Select Operator
expressions:
- expr: assert_true((_col2 < 2))
+ expr: assert_true((_col4 < 2))
type: void
outputColumnNames: _col0
Limit
Modified: hive/branches/HIVE-4115/ql/src/test/results/clientnegative/udf_assert_true2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientnegative/udf_assert_true2.q.out?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientnegative/udf_assert_true2.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientnegative/udf_assert_true2.q.out Fri Apr 26 04:59:50 2013
@@ -19,10 +19,10 @@ STAGE PLANS:
Lateral View Forward
Select Operator
Lateral View Join Operator
- outputColumnNames: _col2
+ outputColumnNames: _col4
Select Operator
expressions:
- expr: (1 + assert_true((_col2 < 2)))
+ expr: (1 + assert_true((_col4 < 2)))
type: int
outputColumnNames: _col0
Limit
@@ -40,10 +40,10 @@ STAGE PLANS:
UDTF Operator
function name: explode
Lateral View Join Operator
- outputColumnNames: _col2
+ outputColumnNames: _col4
Select Operator
expressions:
- expr: (1 + assert_true((_col2 < 2)))
+ expr: (1 + assert_true((_col4 < 2)))
type: int
outputColumnNames: _col0
Limit
Modified: hive/branches/HIVE-4115/ql/src/test/results/clientpositive/allcolref_in_udf.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientpositive/allcolref_in_udf.q.out?rev=1476039&r1=1476038&r2=1476039&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientpositive/allcolref_in_udf.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientpositive/allcolref_in_udf.q.out Fri Apr 26 04:59:50 2013
@@ -186,3 +186,68 @@ POSTHOOK: Input: default@src
8val_89 NULL 9val_9
9val_910val_10 9val_9 10val_10
9val_910 NULL 10val_10
+PREHOOK: query: -- HIVE-4181 TOK_FUNCTIONSTAR for UDTF
+create table allcolref as select array(key, value) from src
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@src
+POSTHOOK: query: -- HIVE-4181 TOK_FUNCTIONSTAR for UDTF
+create table allcolref as select array(key, value) from src
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@allcolref
+PREHOOK: query: explain select explode(*) as x from allcolref limit 10
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select explode(*) as x from allcolref limit 10
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME allcolref))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTIONSTAR explode) x)) (TOK_LIMIT 10)))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ allcolref
+ TableScan
+ alias: allcolref
+ Select Operator
+ expressions:
+ expr: _c0
+ type: array<string>
+ outputColumnNames: _col0
+ UDTF Operator
+ function name: explode
+ Limit
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: 10
+
+
+PREHOOK: query: select explode(*) as x from allcolref limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@allcolref
+#### A masked pattern was here ####
+POSTHOOK: query: select explode(*) as x from allcolref limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@allcolref
+#### A masked pattern was here ####
+238
+val_238
+86
+val_86
+311
+val_311
+27
+val_27
+165
+val_165