You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@trafodion.apache.org by sa...@apache.org on 2017/05/08 20:28:33 UTC
[1/8] incubator-trafodion git commit: Changes to move all Trafodion
created hdfs files under /user/trafodion
Repository: incubator-trafodion
Updated Branches:
refs/heads/master f60c1b096 -> 33a9005d2
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/f094aa0d/core/sql/regress/hive/TEST005
----------------------------------------------------------------------
diff --git a/core/sql/regress/hive/TEST005 b/core/sql/regress/hive/TEST005
index e723fd7..6cb42c9 100644
--- a/core/sql/regress/hive/TEST005
+++ b/core/sql/regress/hive/TEST005
@@ -26,33 +26,33 @@
--
-- @@@ END COPYRIGHT @@@
-sh regrhadoop.ksh fs -mkdir /user/hive/exttables/customer_ddl;
-sh regrhadoop.ksh fs -mkdir /user/hive/exttables/customer_temp;
-sh regrhadoop.ksh fs -mkdir /user/hive/exttables/tbl_utf8;
-sh regrhadoop.ksh fs -mkdir /user/hive/exttables/tbl_type;
-sh regrhadoop.ksh fs -mkdir /user/hive/exttables/tbl_gbk;
-sh regrhadoop.ksh fs -mkdir /user/hive/exttables/tbl_dos;
-sh regrhadoop.ksh fs -mkdir /user/hive/exttables/tbl_dos_num;
-sh regrhadoop.ksh fs -mkdir /user/hive/exttables/tbl_bad;
+sh regrhadoop.ksh fs -mkdir /user/trafodion/hive/exttables/customer_ddl;
+sh regrhadoop.ksh fs -mkdir /user/trafodion/hive/exttables/customer_temp;
+sh regrhadoop.ksh fs -mkdir /user/trafodion/hive/exttables/tbl_utf8;
+sh regrhadoop.ksh fs -mkdir /user/trafodion/hive/exttables/tbl_type;
+sh regrhadoop.ksh fs -mkdir /user/trafodion/hive/exttables/tbl_gbk;
+sh regrhadoop.ksh fs -mkdir /user/trafodion/hive/exttables/tbl_dos;
+sh regrhadoop.ksh fs -mkdir /user/trafodion/hive/exttables/tbl_dos_num;
+sh regrhadoop.ksh fs -mkdir /user/trafodion/hive/exttables/tbl_bad;
--empty folders
-sh regrhadoop.ksh fs -rm /user/hive/exttables/customer_ddl/*;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/customer_temp/*;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/tbl_utf8/*;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/tbl_type/*;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/tbl_gbk/*;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/tbl_dos/*;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/tbl_dos_num/*;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/tbl_bad/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/customer_ddl/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/customer_temp/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/tbl_utf8/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/tbl_type/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/tbl_gbk/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/tbl_dos/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/tbl_dos_num/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/tbl_bad/*;
--- setup Hive tables
sh regrhive.ksh -v -f $REGRTSTDIR/TEST005_a.hive.sql;
-sh regrhadoop.ksh fs -put $REGRTSTDIR/tbl_utf8.data /user/hive/exttables/tbl_utf8;
-sh regrhadoop.ksh fs -put $REGRTSTDIR/tbl_type.data /user/hive/exttables/tbl_type;
-sh regrhadoop.ksh fs -put $REGRTSTDIR/tbl_gbk.data /user/hive/exttables/tbl_gbk;
-sh regrhadoop.ksh fs -put $REGRTSTDIR/tbl_dos.data /user/hive/exttables/tbl_dos;
-sh regrhadoop.ksh fs -put $REGRTSTDIR/tbl_dos_numeric.data /user/hive/exttables/tbl_dos_num;
-sh regrhadoop.ksh fs -put $REGRTSTDIR/tbl_bad.data /user/hive/exttables/tbl_bad;
+sh regrhadoop.ksh fs -put $REGRTSTDIR/tbl_utf8.data /user/trafodion/hive/exttables/tbl_utf8;
+sh regrhadoop.ksh fs -put $REGRTSTDIR/tbl_type.data /user/trafodion/hive/exttables/tbl_type;
+sh regrhadoop.ksh fs -put $REGRTSTDIR/tbl_gbk.data /user/trafodion/hive/exttables/tbl_gbk;
+sh regrhadoop.ksh fs -put $REGRTSTDIR/tbl_dos.data /user/trafodion/hive/exttables/tbl_dos;
+sh regrhadoop.ksh fs -put $REGRTSTDIR/tbl_dos_numeric.data /user/trafodion/hive/exttables/tbl_dos_num;
+sh regrhadoop.ksh fs -put $REGRTSTDIR/tbl_bad.data /user/trafodion/hive/exttables/tbl_bad;
log LOG005 clear;
@@ -155,7 +155,7 @@ select * from hiveregr5.newtable2;
-- add a second partition to customer_bp
sh regrhive.ksh -v -f $REGRTSTDIR/TEST005_c.hive.sql;
-- add more files to customer_ddl
-sh regrhadoop.ksh dfs -cp /user/hive/exttables/customer_temp/* /user/hive/exttables/customer_ddl;
+sh regrhadoop.ksh dfs -cp /user/trafodion/hive/exttables/customer_temp/* /user/trafodion/hive/exttables/customer_ddl;
-- no query cache hit, but NATable cache hit
prepare s3 from
@@ -266,7 +266,7 @@ select count(*) from trafodion.seabase.traf_tbl_bad;
delete from trafodion.seabase.traf_tbl_bad ;
load with log error rows into trafodion.seabase.traf_tbl_bad select * from tbl_bad;
select count(*) from trafodion.seabase.traf_tbl_bad;
-load with log error rows to '/bulkload/logs/TEST005' into trafodion.seabase.traf_tbl_bad select * from tbl_bad;
+load with log error rows to '/user/trafodion/bulkload/logs/TEST005' into trafodion.seabase.traf_tbl_bad select * from tbl_bad;
select count(*) from trafodion.seabase.traf_tbl_bad;
delete from trafodion.seabase.traf_tbl_bad ;
load with stop after 3 error rows into trafodion.seabase.traf_tbl_bad select * from tbl_bad;
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/f094aa0d/core/sql/regress/hive/TEST005_a.hive.sql
----------------------------------------------------------------------
diff --git a/core/sql/regress/hive/TEST005_a.hive.sql b/core/sql/regress/hive/TEST005_a.hive.sql
index 0437bbb..f73790e 100644
--- a/core/sql/regress/hive/TEST005_a.hive.sql
+++ b/core/sql/regress/hive/TEST005_a.hive.sql
@@ -42,7 +42,7 @@ create external table customer_ddl
c_last_review_date string
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/customer_ddl';
+location '/user/trafodion/hive/exttables/customer_ddl';
drop table customer_temp;
create external table customer_temp
@@ -67,7 +67,7 @@ create external table customer_temp
c_last_review_date string
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/customer_temp';
+location '/user/trafodion/hive/exttables/customer_temp';
drop table customer_bp;
create table customer_bp
@@ -104,7 +104,7 @@ create external table tbl_utf8
translator string
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/tbl_utf8';
+location '/user/trafodion/hive/exttables/tbl_utf8';
drop table tbl_utf8_temp;
create table tbl_utf8_temp
@@ -145,7 +145,7 @@ create external table tbl_type
d30 decimal(30)
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/tbl_type';
+location '/user/trafodion/hive/exttables/tbl_type';
drop table tbl_type_temp;
create table tbl_type_temp
@@ -174,7 +174,7 @@ create external table tbl_gbk
c2 string
)
row format delimited fields terminated by '\t'
-location '/user/hive/exttables/tbl_gbk';
+location '/user/trafodion/hive/exttables/tbl_gbk';
drop table tbl_dos;
CREATE external TABLE tbl_dos(
@@ -183,7 +183,7 @@ CREATE external TABLE tbl_dos(
c3 int,
c4 string)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/tbl_dos'
+location '/user/trafodion/hive/exttables/tbl_dos'
;
drop table tbl_dos_num;
@@ -191,7 +191,7 @@ CREATE external TABLE tbl_dos_num(
c1 int,
c2 int)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/tbl_dos_num'
+location '/user/trafodion/hive/exttables/tbl_dos_num'
;
drop table tbl_bad;
@@ -208,7 +208,7 @@ c8 tinyint
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '|'
LOCATION
-'/user/hive/exttables/tbl_bad';
+'/user/trafodion/hive/exttables/tbl_bad';
drop table thive_insert_smallint;
create table thive_insert_smallint (a smallint);
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/f094aa0d/core/sql/regress/hive/TEST009
----------------------------------------------------------------------
diff --git a/core/sql/regress/hive/TEST009 b/core/sql/regress/hive/TEST009
index 3403deb..f6c8a14 100755
--- a/core/sql/regress/hive/TEST009
+++ b/core/sql/regress/hive/TEST009
@@ -56,8 +56,8 @@ drop schema if exists "_HV_SCH_T009_" cascade;
drop schema if exists hive_t009 cascade;
-- remove data from hdfs
-sh regrhadoop.ksh fs -rm /user/hive/exttables/t009t1/*;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/t009t2/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/t009t1/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/t009t2/*;
-- Remove external hive tables used by this test
drop external table if exists customer for hive.hive.customer cascade;
@@ -170,7 +170,7 @@ drop external table item for hive.hive.item;
-- this causes the external table to be invalid
-- cleanup data from the old table, and create/load data with additional column
-sh regrhadoop.ksh fs -rm /user/hive/exttables/t009t1/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/t009t1/*;
sh regrhive.ksh -v -f $REGRTSTDIR/TEST009_b.hive.sql &> $REGRRUNDIR/LOG009_b.hive.log;;
-- should fail - column mismatch
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/f094aa0d/core/sql/regress/hive/TEST009_a.hive.sql
----------------------------------------------------------------------
diff --git a/core/sql/regress/hive/TEST009_a.hive.sql b/core/sql/regress/hive/TEST009_a.hive.sql
index 38a8563..390bbbd 100644
--- a/core/sql/regress/hive/TEST009_a.hive.sql
+++ b/core/sql/regress/hive/TEST009_a.hive.sql
@@ -33,7 +33,7 @@ create external table t009t1
c int
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/t009t1';
+location '/user/trafodion/hive/exttables/t009t1';
-- Our version of HIVE does not support insert ... VALUES clause, so use the
-- load command from an existing table.
@@ -52,7 +52,7 @@ create external table t009t2
c int
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/t009t2';
+location '/user/trafodion/hive/exttables/t009t2';
insert into table t009t2
select c_customer_sk, c_birth_day, c_birth_month
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/f094aa0d/core/sql/regress/hive/TEST009_b.hive.sql
----------------------------------------------------------------------
diff --git a/core/sql/regress/hive/TEST009_b.hive.sql b/core/sql/regress/hive/TEST009_b.hive.sql
index a27994f..b9d088d 100644
--- a/core/sql/regress/hive/TEST009_b.hive.sql
+++ b/core/sql/regress/hive/TEST009_b.hive.sql
@@ -34,7 +34,7 @@ create external table t009t1
d int
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/t009t1';
+location '/user/trafodion/hive/exttables/t009t1';
-- Our version of HIVE does not support insert ... VALUES clause, so use the
-- load command from an existing table.
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/f094aa0d/core/sql/regress/hive/TEST018
----------------------------------------------------------------------
diff --git a/core/sql/regress/hive/TEST018 b/core/sql/regress/hive/TEST018
index 364baf8..7759805 100644
--- a/core/sql/regress/hive/TEST018
+++ b/core/sql/regress/hive/TEST018
@@ -83,11 +83,11 @@ drop table store_sales_salt;
drop table nulls;
drop table null_format_src;
-sh regrhadoop.ksh fs -rm /bulkload/merged_customer_address.gz ;
-sh regrhadoop.ksh fs -rm /bulkload/merged_customer_demogs.gz ;
-sh regrhadoop.ksh fs -rm /bulkload/merged_customer_demogs_3;
-sh regrhadoop.ksh fs -rm /bulkload/merged_customer_demogs_4.gz ;
-sh regrhadoop.ksh fs -rm /bulkload/merged_customer_demogs_2.gz ;
+sh regrhadoop.ksh fs -rm /user/trafodion/bulkload/merged_customer_address.gz ;
+sh regrhadoop.ksh fs -rm /user/trafodion/bulkload/merged_customer_demogs.gz ;
+sh regrhadoop.ksh fs -rm /user/trafodion/bulkload/merged_customer_demogs_3;
+sh regrhadoop.ksh fs -rm /user/trafodion/bulkload/merged_customer_demogs_4.gz ;
+sh regrhadoop.ksh fs -rm /user/trafodion/bulkload/merged_customer_demogs_2.gz ;
sh regrhbase.ksh $REGRTSTDIR/TEST018_drop_hbase_objects.hbase &> $REGRRUNDIR/TEST018_drop_hbase_objects.log ;
?section setup
--------------------------------------------------------------------------
@@ -260,17 +260,17 @@ select * from hive.hive.null_format_colon;
-- using unload
unload with purgedata from target
- into '/user/hive/exttables/null_format_default'
+ into '/user/trafodion/hive/exttables/null_format_default'
select * from null_format_src;
select * from hive.hive.null_format_default;
unload with purgedata from target
- into '/user/hive/exttables/null_format_empty'
+ into '/user/trafodion/hive/exttables/null_format_empty'
select * from null_format_src;
select * from hive.hive.null_format_empty;
unload with purgedata from target
- into '/user/hive/exttables/null_format_colon'
+ into '/user/trafodion/hive/exttables/null_format_colon'
select * from null_format_src;
select * from hive.hive.null_format_colon;
@@ -285,7 +285,7 @@ alter table hbase.customer_salt generate stored descriptor;
--exp1
explain options 'f'
-UNLOAD EXTRACT TO '/bulkload/customer_address'
+UNLOAD EXTRACT TO '/user/trafodion/bulkload/customer_address'
select * from trafodion.hbase.customer_address
<<+ cardinality 10e10 >>;
--unload1
@@ -294,12 +294,12 @@ WITH
PURGEDATA FROM TARGET DELIMITER '|' RECORD_SEPARATOR '\n' NULL_STRING 'NULL'
MERGE FILE 'merged_customer_address.gz' OVERWRITE
COMPRESSION GZIP
-INTO '/bulkload/customer_address'
+INTO '/user/trafodion/bulkload/customer_address'
select * from trafodion.hbase.customer_address
;
log;
-sh echo "regrhadoop.ksh fs -copyToLocal /bulkload/customer_address/merged_customer_address.gz /tmp " >> LOG018 ;;
-sh regrhadoop.ksh fs -copyToLocal /bulkload/customer_address/merged_customer_address.gz /tmp ;
+sh echo "regrhadoop.ksh fs -copyToLocal /user/trafodion/bulkload/customer_address/merged_customer_address.gz /tmp " >> LOG018 ;;
+sh regrhadoop.ksh fs -copyToLocal /user/trafodion/bulkload/customer_address/merged_customer_address.gz /tmp ;
sh echo "gunzip -f /tmp/merged_customer_address.gz" >> LOG018 ; ;
sh gunzip -f /tmp/merged_customer_address.gz ;
sh echo "cat /tmp/merged_customer_address | wc -l" >> LOG018 ;
@@ -316,12 +316,12 @@ UNLOAD
WITH PURGEDATA FROM TARGET
MERGE FILE 'merged_customer_demogs.gz' OVERWRITE
COMPRESSION GZIP
-INTO '/bulkload/customer_demographics'
+INTO '/user/trafodion/bulkload/customer_demographics'
select * from trafodion.hbase.customer_demographics
<<+ cardinality 10e10 >>;
log;
-sh echo "regrhadoop.ksh fs -copyToLocal /bulkload/customer_demographics/merged_customer_demogs.gz /tmp " >> LOG018 ;
-sh regrhadoop.ksh fs -copyToLocal /bulkload/customer_demographics/merged_customer_demogs.gz /tmp ;
+sh echo "regrhadoop.ksh fs -copyToLocal /user/trafodion/bulkload/customer_demographics/merged_customer_demogs.gz /tmp " >> LOG018 ;
+sh regrhadoop.ksh fs -copyToLocal /user/trafodion/bulkload/customer_demographics/merged_customer_demogs.gz /tmp ;
sh echo "gunzip -f /tmp/merged_customer_demogs.gz" >> LOG018 ;
sh gunzip -f /tmp/merged_customer_demogs.gz ;
sh echo "cat /tmp/merged_customer_demogs | wc -l" >> LOG018 ;
@@ -333,17 +333,17 @@ UNLOAD
WITH PURGEDATA FROM TARGET
MERGE FILE 'merged_customer_demogs_2' OVERWRITE
--COMPRESSION GZIP
-INTO '/bulkload/customer_demographics'
+INTO '/user/trafodion/bulkload/customer_demographics'
select * from trafodion.hbase.customer_demographics
<<+ cardinality 10e10 >>;
log;
-sh echo "regrhadoop.ksh fs -cat /bulkload/customer_demographics/merged_customer_demogs_2 | wc -l " >> LOG018 ;
-sh regrhadoop.ksh fs -cat /bulkload/customer_demographics/merged_customer_demogs_2 | wc -l >> LOG018 ;
+sh echo "regrhadoop.ksh fs -cat /user/trafodion/bulkload/customer_demographics/merged_customer_demogs_2 | wc -l " >> LOG018 ;
+sh regrhadoop.ksh fs -cat /user/trafodion/bulkload/customer_demographics/merged_customer_demogs_2 | wc -l >> LOG018 ;
log LOG018;
----------------------------------
--exp 3
explain options 'f'
-UNLOAD EXTRACT TO '/bulkload/customer_demographics_salt'
+UNLOAD EXTRACT TO '/user/trafodion/bulkload/customer_demographics_salt'
select * from trafodion.hbase.customer_demographics_salt
<<+ cardinality 10e10 >>;
--unload 4
@@ -356,8 +356,8 @@ select * from trafodion.hbase.customer_demographics_salt
<<+ cardinality 10e10 >>;
log;
-sh echo "regrhadoop.ksh fs -du -s /bulkload/customer_demographics_salt/merged_customer_demogs_3" >> LOG018 ;
-sh regrhadoop.ksh fs -du -s /bulkload/customer_demographics_salt/merged_customer_demogs_3 >> LOG018 ;
+sh echo "regrhadoop.ksh fs -du -s /user/trafodion/bulkload/customer_demographics_salt/merged_customer_demogs_3" >> LOG018 ;
+sh regrhadoop.ksh fs -du -s /user/trafodion/bulkload/customer_demographics_salt/merged_customer_demogs_3 >> LOG018 ;
log LOG018;
-------------------
--unload 5
@@ -365,34 +365,34 @@ UNLOAD
WITH PURGEDATA FROM TARGET
MERGE FILE 'merged_customer_demogs_4.gz' OVERWRITE
COMPRESSION GZIP
-INTO '/bulkload/customer_demographics_salt'
+INTO '/user/trafodion/bulkload/customer_demographics_salt'
select * from trafodion.hbase.customer_demographics_salt
<<+ cardinality 10e10 >>;
log;
-sh echo "regrhadoop.ksh fs -du -s /bulkload/customer_demographics_salt/merged_customer_demogs_4.gz" >> LOG018 ;
-sh regrhadoop.ksh fs -du -s /bulkload/customer_demographics_salt/merged_customer_demogs_4.gz >> LOG018 ;
+sh echo "regrhadoop.ksh fs -du -s /user/trafodion/bulkload/customer_demographics_salt/merged_customer_demogs_4.gz" >> LOG018 ;
+sh regrhadoop.ksh fs -du -s /user/trafodion/bulkload/customer_demographics_salt/merged_customer_demogs_4.gz >> LOG018 ;
log LOG018;
--exp4
explain options 'f'
-UNLOAD EXTRACT TO '/bulkload/customer_demographics_salt'
+UNLOAD EXTRACT TO '/user/trafodion/bulkload/customer_demographics_salt'
select * from trafodion.hbase.customer_demographics_salt
<<+ cardinality 10e10 >>;
--unload 6
UNLOAD
WITH PURGEDATA FROM TARGET
---MERGE FILE '/bulkload/merged_customer_demogs_2.gz' OVERWRITE
+--MERGE FILE '/user/trafodion/bulkload/merged_customer_demogs_2.gz' OVERWRITE
--COMPRESSION GZIP
-INTO '/bulkload/customer_demographics_salt'
+INTO '/user/trafodion/bulkload/customer_demographics_salt'
select * from trafodion.hbase.customer_demographics_salt
<<+ cardinality 10e10 >>;
log;
-sh echo "regrhadoop.ksh fs -cat /bulkload/customer_demographics_salt/file* | wc -l" >> LOG018 ;
-sh regrhadoop.ksh fs -cat /bulkload/customer_demographics_salt/file* | wc -l >> LOG018 ;
-sh echo "regrhadoop.ksh fs -ls /bulkload/customer_demographics_salt/file* | grep file | wc -l" >> LOG018 ;
-sh regrhadoop.ksh fs -ls /bulkload/customer_demographics_salt/file* | grep file | wc -l >> LOG018 ;
+sh echo "regrhadoop.ksh fs -cat /user/trafodion/bulkload/customer_demographics_salt/file* | wc -l" >> LOG018 ;
+sh regrhadoop.ksh fs -cat /user/trafodion/bulkload/customer_demographics_salt/file* | wc -l >> LOG018 ;
+sh echo "regrhadoop.ksh fs -ls /user/trafodion/bulkload/customer_demographics_salt/file* | grep file | wc -l" >> LOG018 ;
+sh regrhadoop.ksh fs -ls /user/trafodion/bulkload/customer_demographics_salt/file* | grep file | wc -l >> LOG018 ;
log LOG018;
--unload 7
@@ -400,15 +400,15 @@ UNLOAD
WITH PURGEDATA FROM TARGET
MERGE FILE 'merged_customer_demogs_2.gz' OVERWRITE
COMPRESSION GZIP
-INTO '/bulkload/customer_demographics_salt'
+INTO '/user/trafodion/bulkload/customer_demographics_salt'
select * from trafodion.hbase.customer_demographics_salt
<<+ cardinality 10e10 >>;
-sh regrhadoop.ksh fs -copyToLocal /bulkload/customer_demographics_salt/merged_customer_demogs_2.gz /tmp ;
+sh regrhadoop.ksh fs -copyToLocal /user/trafodion/bulkload/customer_demographics_salt/merged_customer_demogs_2.gz /tmp ;
sh gunzip -f /tmp/merged_customer_demogs_2.gz ;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/unload_customer_demographics/* ;
-sh regrhadoop.ksh fs -copyFromLocal /tmp/merged_customer_demogs_2 /user/hive/exttables/unload_customer_demographics ;
-sh rm /bulkload/merged_customer_demogs_2 ;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/unload_customer_demographics/* ;
+sh regrhadoop.ksh fs -copyFromLocal /tmp/merged_customer_demogs_2 /user/trafodion/hive/exttables/unload_customer_demographics ;
+sh rm /user/trafodion/bulkload/merged_customer_demogs_2 ;
cqd HIVE_MAX_STRING_LENGTH_IN_BYTES '100';
select [first 100] * from hive.hive.unload_customer_demographics where cd_demo_sk <200 order by cd_demo_sk;
@@ -420,12 +420,12 @@ UNLOAD
WITH PURGEDATA FROM TARGET
MERGE FILE 'merged_customer_demogs_4.gz' OVERWRITE
COMPRESSION GZIP
-INTO '/bulkload/customer_demographics_salt'
+INTO '/user/trafodion/bulkload/customer_demographics_salt'
select * from trafodion.hbase.customer_demographics_salt
<<+ cardinality 10e10 >>;
log;
-sh echo "regrhadoop.ksh fs -ls /bulkload/customer_demographics_salt/merged* | grep merge | wc -l" >> LOG018 ;
-sh regrhadoop.ksh fs -ls /bulkload/customer_demographics_salt/merged* | grep merge | wc -l >> LOG018 ;
+sh echo "regrhadoop.ksh fs -ls /user/trafodion/bulkload/customer_demographics_salt/merged* | grep merge | wc -l" >> LOG018 ;
+sh regrhadoop.ksh fs -ls /user/trafodion/bulkload/customer_demographics_salt/merged* | grep merge | wc -l >> LOG018 ;
log LOG018;
@@ -435,7 +435,7 @@ UNLOAD
WITH PURGEDATA FROM TARGET
MERGE FILE 'merged_customer_demogs_2' OVERWRITE
COMPRESSION GZIP
-INTO '/bulkload/customer_demographics_salt'
+INTO '/user/trafodion/bulkload/customer_demographics_salt'
select * from trafodion.hbase.customer_demographics_salt
<<+ cardinality 10e10 >>;
@@ -446,7 +446,7 @@ UNLOAD
WITH PURGEDATA FROM TARGET
MERGE FILE 'merged_customer_demographics' OVERWRITE
--COMPRESSION GZIP
-INTO '/user/hive/exttables/unload_customer_demographics'
+INTO '/user/trafodion/hive/exttables/unload_customer_demographics'
select * from trafodion.hbase.customer_demographics_salt
<<+ cardinality 10e10 >>;
--sh sleep 10;
@@ -458,7 +458,7 @@ UNLOAD
WITH PURGEDATA FROM TARGET
--MERGE FILE 'merged_customer_demographics' OVERWRITE
--COMPRESSION GZIP
-INTO '/user/hive/exttables/unload_customer_demographics'
+INTO '/user/trafodion/hive/exttables/unload_customer_demographics'
select * from trafodion.hbase.customer_demographics_salt
<<+ cardinality 10e10 >>;
--sh sleep 10;
@@ -468,7 +468,7 @@ select [first 20] * from hive.hive.unload_customer_demographics where cd_demo_sk
--unload 12
UNLOAD
WITH PURGEDATA FROM TARGET
-INTO '/user/hive/exttables/unload_customer_address'
+INTO '/user/trafodion/hive/exttables/unload_customer_address'
select * from trafodion.hbase.customer_address ;
--sh sleep 10;
select count(*) from hive.hive.unload_customer_address;
@@ -478,7 +478,7 @@ select [first 20] * from hive.hive.unload_customer_address where ca_address_sk <
--test with numeric delimiers
UNLOAD
WITH PURGEDATA FROM TARGET DELIMITER 124 RECORD_SEPARATOR 10
-INTO '/user/hive/exttables/unload_customer_address'
+INTO '/user/trafodion/hive/exttables/unload_customer_address'
select * from trafodion.hbase.customer_address ;
--sh sleep 10;
select count(*) from hive.hive.unload_customer_address;
@@ -493,7 +493,7 @@ UNLOAD
WITH PURGEDATA FROM TARGET
--MERGE FILE 'merged_customer_demographics' OVERWRITE
--COMPRESSION GZIP
-INTO '/user/hive/exttables/unload_customer'
+INTO '/user/trafodion/hive/exttables/unload_customer'
select * from trafodion.hbase.customer_salt;
--sh sleep 10;
select count(*) from hive.hive.unload_customer;
@@ -504,7 +504,7 @@ UNLOAD
WITH PURGEDATA FROM TARGET
--MERGE FILE 'merged_customer_demographics' OVERWRITE
--COMPRESSION GZIP
-INTO '/user/hive/exttables/unload_customer_demographics'
+INTO '/user/trafodion/hive/exttables/unload_customer_demographics'
select * from trafodion.hbase.customer_demographics_salt;
--sh sleep 10;
select count(*) from hive.hive.unload_customer_demographics;
@@ -516,7 +516,7 @@ WITH
PURGEDATA FROM TARGET DELIMITER '|' RECORD_SEPARATOR '\n' NULL_STRING 'NULL'
MERGE FILE 'merged_customer_address.gz' OVERWRITE
COMPRESSION GZIP
-INTO '/bulkload/customer_address'
+INTO '/user/trafodion/bulkload/customer_address'
select * from trafodion.hbase.customer_address where ca_address_sk < 100;
--unload 16
@@ -525,13 +525,13 @@ WITH
PURGEDATA FROM TARGET DELIMITER '|' RECORD_SEPARATOR '\n' NULL_STRING 'NULL'
MERGE FILE 'merged_customer_address.gz'
COMPRESSION GZIP
-INTO '/bulkload/customer_address'
+INTO '/user/trafodion/bulkload/customer_address'
select * from trafodion.hbase.customer_address where ca_address_sk < 100;
log;
-sh echo "regrhadoop.ksh fs -rm /user/hive/exttables/unload_customer_demographics/*" >> LOG018 ;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/unload_customer_demographics/* ;
+sh echo "regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/unload_customer_demographics/*" >> LOG018 ;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/unload_customer_demographics/* ;
log LOG018;
@@ -541,7 +541,7 @@ cqd attempt_esp_parallelism reset;
UNLOAD
WITH
PURGEDATA FROM TARGET DELIMITER '|' RECORD_SEPARATOR '\n'
-INTO '/user/hive/exttables/unload_store_sales_summary'
+INTO '/user/trafodion/hive/exttables/unload_store_sales_summary'
select ss_sold_date_sk,ss_store_sk, sum (ss_quantity) from store_sales_salt group by ss_sold_date_sk ,ss_store_sk;
--sh sleep 10;
select [first 100] * from hive.hive.unload_store_sales_summary order by ss_sold_date_sk,ss_store_sk;
@@ -549,7 +549,7 @@ select [first 100] * from hive.hive.unload_store_sales_summary order by ss_sol
--unload 18
UNLOAD
WITH PURGEDATA FROM TARGET
-INTO '/user/hive/exttables/unload_customer_and_address'
+INTO '/user/trafodion/hive/exttables/unload_customer_and_address'
select * from trafodion.hbase.customer_salt c join trafodion.hbase.customer_address ca on c.c_current_addr_sk = ca.ca_address_sk ;
--sh sleep 10;
select count(*) from hive.hive.unload_customer_and_address;
@@ -559,7 +559,7 @@ select [first 20] * from hive.hive.unload_customer_and_address order by ca_addre
UNLOAD
WITH
PURGEDATA FROM TARGET
-INTO '/user/hive/exttables/unload_customer_address'
+INTO '/user/trafodion/hive/exttables/unload_customer_address'
select * from customer_address where ca_address_sk < 1000 union select * from customer_address where ca_address_sk > 40000 and ca_address_sk < 41000;
--sh sleep 10;
select count(*) from hive.hive.unload_customer_address;
@@ -629,14 +629,14 @@ cqd TRAF_TABLE_SNAPSHOT_SCAN_TABLE_SIZE_THRESHOLD '0';
cqd comp_bool_226 'on'; -- allow the extract syntax
explain options 'f'
-UNLOAD EXTRACT TO '/bulkload/customer_address'
+UNLOAD EXTRACT TO '/user/trafodion/bulkload/customer_address'
select * from trafodion.hbase.customer_address <<+ cardinality 10e10 >>;
cqd comp_bool_226 reset;
UNLOAD
WITH PURGEDATA FROM TARGET
EXISTING SNAPSHOT HAVING SUFFIX 'SNAP111'
-INTO '/user/hive/exttables/unload_customer_address'
+INTO '/user/trafodion/hive/exttables/unload_customer_address'
select * from customer_address
<<+ cardinality 10e10 >>;
@@ -647,14 +647,14 @@ select [first 20] * from hive.hive.unload_customer_address where ca_address_sk
cqd comp_bool_226 'on'; -- allow the extract syntax
explain options 'f'
-UNLOAD EXTRACT TO '/user/hive/exttables/unload_customer_demographics'
+UNLOAD EXTRACT TO '/user/trafodion/hive/exttables/unload_customer_demographics'
select * from trafodion.hbase.customer_demographics_salt <<+ cardinality 10e10 >>;
cqd comp_bool_226 reset;
UNLOAD
WITH PURGEDATA FROM TARGET
EXISTING SNAPSHOT HAVING SUFFIX 'SNAP111'
-INTO '/user/hive/exttables/unload_customer_demographics'
+INTO '/user/trafodion/hive/exttables/unload_customer_demographics'
select * from trafodion.hbase.customer_demographics_salt <<+ cardinality 10e10 >>;
select count(*) from hive.hive.unload_customer_demographics;
@@ -664,7 +664,7 @@ select [first 20] * from hive.hive.unload_customer_demographics where cd_demo_sk
UNLOAD
WITH PURGEDATA FROM TARGET
NEW SNAPSHOT HAVING SUFFIX 'SNAP112'
-INTO '/user/hive/exttables/unload_customer_demographics'
+INTO '/user/trafodion/hive/exttables/unload_customer_demographics'
select * from trafodion.hbase.customer_demographics_salt <<+ cardinality 10e10 >>;
select count(*) from hive.hive.unload_customer_demographics;
@@ -674,7 +674,7 @@ select [first 20] * from hive.hive.unload_customer_demographics where cd_demo_sk
UNLOAD
WITH PURGEDATA FROM TARGET
NEW SNAPSHOT HAVING SUFFIX 'SNAP'
-INTO '/user/hive/exttables/unload_customer_demographics'
+INTO '/user/trafodion/hive/exttables/unload_customer_demographics'
select * from trafodion.hbase.customer_demographics_salt <<+ cardinality 10e10 >>;
select count(*) from hive.hive.unload_customer_demographics;
@@ -685,7 +685,7 @@ UNLOAD
WITH
PURGEDATA FROM TARGET
NEW SNAPSHOT HAVING SUFFIX 'SNAP'
-INTO '/user/hive/exttables/unload_customer_address'
+INTO '/user/trafodion/hive/exttables/unload_customer_address'
select * from customer_address where ca_address_sk < 1000 union select * from customer_address where ca_address_sk > 40000 and ca_address_sk < 41000;
select count(*) from hive.hive.unload_customer_address;
@@ -696,7 +696,7 @@ select [first 20] * from hive.hive.unload_customer_address order by ca_address_s
UNLOAD
WITH PURGEDATA FROM TARGET
NEW SNAPSHOT HAVING SUFFIX 'SNAP'
-INTO '/user/hive/exttables/unload_customer_and_address'
+INTO '/user/trafodion/hive/exttables/unload_customer_and_address'
select * from trafodion.hbase.customer_salt c join trafodion.hbase.customer_address ca on c.c_current_addr_sk = ca.ca_address_sk ;
--sh sleep 10;
select count(*) from hive.hive.unload_customer_and_address;
@@ -705,14 +705,14 @@ select [first 20] * from hive.hive.unload_customer_and_address order by ca_addre
--unload 26 --test with index scan
cqd comp_bool_226 'on'; -- allow the extract syntax
explain options 'f'
-UNLOAD EXTRACT TO '/bulkload/customer_name'
+UNLOAD EXTRACT TO '/user/trafodion/bulkload/customer_name'
select c_first_name,c_last_name from trafodion.hbase.customer_salt;
cqd comp_bool_226 reset;
UNLOAD
WITH PURGEDATA FROM TARGET
NEW SNAPSHOT HAVING SUFFIX 'SNAP111'
-INTO '/user/hive/exttables/unload_customer_name'
+INTO '/user/trafodion/hive/exttables/unload_customer_name'
select c_first_name,c_last_name from trafodion.hbase.customer_salt;
--sh sleep 10;
select count(*) from hive.hive.unload_customer_name;
@@ -723,38 +723,38 @@ select [first 20] * from hive.hive.unload_customer_name order by c_first_name,c_
unload into '//\a//c' select * from CUSTOMER_ADDRESS;
--unload 101 --should give syntax error
-unload with delimiter 0 into '/bulkload/test' select * from CUSTOMER_ADDRESS;
+unload with delimiter 0 into '/user/trafodion/bulkload/test' select * from CUSTOMER_ADDRESS;
--unload 102 --should give an error
-unload with MERGE FILE 'folder/cust_addr' into '/bulkload/test' select * from customer_address;
+unload with MERGE FILE 'folder/cust_addr' into '/user/trafodion/bulkload/test' select * from customer_address;
--unload 103 -- should not give an error
-unload with delimiter '\a' into '/bulkload/test' select * from customer_address;
+unload with delimiter '\a' into '/user/trafodion/bulkload/test' select * from customer_address;
--unload 24 -- should give an error
-unload with delimiter 'abca' into '/bulkload/test' select * from customer_address;
+unload with delimiter 'abca' into '/user/trafodion/bulkload/test' select * from customer_address;
--unload 104 -- should give an error
-unload with record_separator '\abca' into '/bulkload/test' select * from customer_address;
+unload with record_separator '\abca' into '/user/trafodion/bulkload/test' select * from customer_address;
--unload 105 -- should give an error
-unload with record_separator '\z' into '/bulkload/test' select * from customer_address;
+unload with record_separator '\z' into '/user/trafodion/bulkload/test' select * from customer_address;
--unload 106 --should give error
-unload into '/bulkload/test' select * from customer_address order by ca_address_id;
+unload into '/user/trafodion/bulkload/test' select * from customer_address order by ca_address_id;
--unload 107 --should give error
cqd comp_bool_226 'on';
-unload extract to '/bulkload/test' select * from customer_address order by ca_address_id;
+unload extract to '/user/trafodion/bulkload/test' select * from customer_address order by ca_address_id;
cqd comp_bool_226 reset;
--unload 150
log;
-sh echo "regrhadoop.ksh fs -rm /user/hive/exttables/unload_customer_demographics/*" >> LOG018 ;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/unload_customer_demographics/* ;
+sh echo "regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/unload_customer_demographics/*" >> LOG018 ;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/unload_customer_demographics/* ;
log LOG018;
CQD TRAF_UNLOAD_SKIP_WRITING_TO_FILES 'ON';
UNLOAD
WITH PURGEDATA FROM TARGET
-INTO '/user/hive/exttables/unload_customer_demographics'
+INTO '/user/trafodion/hive/exttables/unload_customer_demographics'
(select * from trafodion.hbase.customer_demographics_salt) ;
--sh sleep 10;
select count(*) from hive.hive.unload_customer_demographics;
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/f094aa0d/core/sql/regress/hive/TEST018_create_hive_tables.hive
----------------------------------------------------------------------
diff --git a/core/sql/regress/hive/TEST018_create_hive_tables.hive b/core/sql/regress/hive/TEST018_create_hive_tables.hive
index 8536030..5d60784 100644
--- a/core/sql/regress/hive/TEST018_create_hive_tables.hive
+++ b/core/sql/regress/hive/TEST018_create_hive_tables.hive
@@ -37,7 +37,7 @@ create external table unload_customer_address
ca_location_type string
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/unload_customer_address';
+location '/user/trafodion/hive/exttables/unload_customer_address';
drop table unload_customer_demographics;
@@ -54,7 +54,7 @@ create external table unload_customer_demographics
cd_dep_college_count int
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/unload_customer_demographics';
+location '/user/trafodion/hive/exttables/unload_customer_demographics';
drop table unload_customer;
create external table unload_customer
@@ -79,7 +79,7 @@ create external table unload_customer
c_last_review_date string
)
row format delimited fields terminated by '|' LINES TERMINATED BY '\n'
-location '/user/hive/exttables/unload_customer';
+location '/user/trafodion/hive/exttables/unload_customer';
drop table unload_customer_name;
create external table unload_customer_name
@@ -88,7 +88,7 @@ create external table unload_customer_name
c_last_name string
)
row format delimited fields terminated by '|' LINES TERMINATED BY '\n'
-location '/user/hive/exttables/unload_customer_name';
+location '/user/trafodion/hive/exttables/unload_customer_name';
drop table unload_customer_and_address;
create external table unload_customer_and_address
@@ -126,7 +126,7 @@ create external table unload_customer_and_address
ca_location_type string
)
row format delimited fields terminated by '|' LINES TERMINATED BY '\n'
-location '/user/hive/exttables/unload_customer_and_address';
+location '/user/trafodion/hive/exttables/unload_customer_and_address';
drop table tmp_unload_table;
create external table tmp_unload_table
@@ -134,7 +134,7 @@ create external table tmp_unload_table
filed string
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/tmp_unload_table';
+location '/user/trafodion/hive/exttables/tmp_unload_table';
drop table unload_store_sales_summary;
create external table unload_store_sales_summary
@@ -146,7 +146,7 @@ create external table unload_store_sales_summary
ss_quantity int
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/unload_store_sales_summary';
+location '/user/trafodion/hive/exttables/unload_store_sales_summary';
drop table null_format_default;
create external table null_format_default (a string, b string);
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/f094aa0d/core/sql/sqlcomp/nadefaults.cpp
----------------------------------------------------------------------
diff --git a/core/sql/sqlcomp/nadefaults.cpp b/core/sql/sqlcomp/nadefaults.cpp
index 6c7e75c..38b1feb 100644
--- a/core/sql/sqlcomp/nadefaults.cpp
+++ b/core/sql/sqlcomp/nadefaults.cpp
@@ -1956,7 +1956,7 @@ SDDkwd__(EXE_DIAGNOSTIC_EVENTS, "OFF"),
DDkwd__(HIVE_DEFAULT_CHARSET, (char *)SQLCHARSETSTRING_UTF8),
DD_____(HIVE_DEFAULT_SCHEMA, "HIVE"),
DD_____(HIVE_FILE_CHARSET, ""),
- DD_____(HIVE_FILE_NAME, "/hive/tpcds/customer/customer.dat" ),
+ DD_____(HIVE_FILE_NAME, "/user/trafodion/hive/tpcds/customer/customer.dat" ),
DD_____(HIVE_HDFS_STATS_LOG_FILE, ""),
DDui___(HIVE_INSERT_ERROR_MODE, "1"),
DDint__(HIVE_LIB_HDFS_PORT_OVERRIDE, "-1"),
@@ -2090,7 +2090,7 @@ SDDkwd__(ISO_MAPPING, (char *)SQLCHARSETSTRING_ISO88591),
// (unused)default size is 32000. Change this to extract more data into memory.
DDui___(LOB_OUTPUT_SIZE, "32000"),
- DD_____(LOB_STORAGE_FILE_DIR, "/lobs"),
+ DD_____(LOB_STORAGE_FILE_DIR, "/user/trafodion/lobs"),
// storage types defined in exp/ExpLOBenum.h.
// Default is hdfs_file (value = 1)
@@ -3341,7 +3341,7 @@ XDDkwd__(SUBQUERY_UNNESTING, "ON"),
DDkwd__(TRAF_LOAD_CONTINUE_ON_ERROR, "OFF"),
DD_____(TRAF_LOAD_ERROR_COUNT_ID, "" ),
DD_____(TRAF_LOAD_ERROR_COUNT_TABLE, "ERRORCOUNTER" ),
- DD_____(TRAF_LOAD_ERROR_LOGGING_LOCATION, "/bulkload/logs" ),
+ DD_____(TRAF_LOAD_ERROR_LOGGING_LOCATION, "/user/trafodion/bulkload/logs" ),
DDint__(TRAF_LOAD_FLUSH_SIZE_IN_KB, "1024"),
DDkwd__(TRAF_LOAD_FORCE_CIF, "ON"),
DDkwd__(TRAF_LOAD_LOG_ERROR_ROWS, "OFF"),
@@ -3356,7 +3356,7 @@ XDDkwd__(SUBQUERY_UNNESTING, "ON"),
//need add code to check if folder exists or not. if not issue an error and ask
//user to create it
- DD_____(TRAF_LOAD_PREP_TMP_LOCATION, "/bulkload/" ),
+ DD_____(TRAF_LOAD_PREP_TMP_LOCATION, "/user/trafodion/bulkload/" ),
DDkwd__(TRAF_LOAD_TAKE_SNAPSHOT , "OFF"),
DDkwd__(TRAF_LOAD_USE_FOR_INDEXES, "ON"),
DDkwd__(TRAF_LOAD_USE_FOR_STATS, "OFF"),
@@ -3378,7 +3378,7 @@ XDDkwd__(SUBQUERY_UNNESTING, "ON"),
DDkwd__(TRAF_READ_OBJECT_DESC, "OFF"),
DDkwd__(TRAF_RELOAD_NATABLE_CACHE, "OFF"),
- DD_____(TRAF_SAMPLE_TABLE_LOCATION, "/sample/"),
+ DD_____(TRAF_SAMPLE_TABLE_LOCATION, "/user/trafodion/sample/"),
DDint__(TRAF_SEQUENCE_CACHE_SIZE, "-1"),
DDkwd__(TRAF_SIMILARITY_CHECK, "ROOT"),
@@ -3404,7 +3404,7 @@ XDDkwd__(SUBQUERY_UNNESTING, "ON"),
//timeout before we give up when trying to create the snapshot scanner
DDint__(TRAF_TABLE_SNAPSHOT_SCAN_TIMEOUT, "6000"),
//location for temporary links and files produced by snapshot scan
- DD_____(TRAF_TABLE_SNAPSHOT_SCAN_TMP_LOCATION, "/bulkload/"),
+ DD_____(TRAF_TABLE_SNAPSHOT_SCAN_TMP_LOCATION, "/user/trafodion/bulkload/"),
DDkwd__(TRAF_TINYINT_INPUT_PARAMS, "OFF"),
DDkwd__(TRAF_TINYINT_RETURN_VALUES, "OFF"),
[8/8] incubator-trafodion git commit: Merge remote branch
'origin/pr/1083/head' into merge_1083
Posted by sa...@apache.org.
Merge remote branch 'origin/pr/1083/head' into merge_1083
Project: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/commit/33a9005d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/tree/33a9005d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/diff/33a9005d
Branch: refs/heads/master
Commit: 33a9005d2fe2f790ad18e6f93af504cd3e1d31eb
Parents: f60c1b0 a14a3ee
Author: Sandhya Sundaresan <sa...@apache.org>
Authored: Mon May 8 20:28:03 2017 +0000
Committer: Sandhya Sundaresan <sa...@apache.org>
Committed: Mon May 8 20:28:03 2017 +0000
----------------------------------------------------------------------
.../run_full_trafodion_backup.sh | 2 +-
.../sql/scripts/install_hadoop_regr_test_env | 102 +--
core/sqf/sql/scripts/install_local_hadoop | 15 +-
core/sql/optimizer/OptimizerSimulator.cpp | 2 +-
core/sql/regress/executor/EXPECTED130 | 220 +++---
core/sql/regress/executor/TEST130 | 90 +--
core/sql/regress/hive/EXPECTED003 | 6 +-
core/sql/regress/hive/EXPECTED005 | 104 +--
core/sql/regress/hive/EXPECTED009 | 22 +-
core/sql/regress/hive/EXPECTED018 | 717 ++++++++++---------
core/sql/regress/hive/FILTER005 | 4 +-
core/sql/regress/hive/TEST003 | 52 +-
.../hive/TEST003_create_hive_tables.hive | 24 +-
core/sql/regress/hive/TEST005 | 48 +-
core/sql/regress/hive/TEST005_a.hive.sql | 16 +-
core/sql/regress/hive/TEST009 | 6 +-
core/sql/regress/hive/TEST009_a.hive.sql | 4 +-
core/sql/regress/hive/TEST009_b.hive.sql | 2 +-
core/sql/regress/hive/TEST018 | 148 ++--
.../hive/TEST018_create_hive_tables.hive | 14 +-
core/sql/sqlcomp/nadefaults.cpp | 12 +-
21 files changed, 801 insertions(+), 809 deletions(-)
----------------------------------------------------------------------
[7/8] incubator-trafodion git commit: Expected file change for TEST130
Posted by sa...@apache.org.
Expected file change for TEST130
Project: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/commit/a14a3ee1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/tree/a14a3ee1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/diff/a14a3ee1
Branch: refs/heads/master
Commit: a14a3ee1040fb62c149a2888e8a94bcce4095f4b
Parents: 4b27589
Author: Sandhya Sundaresan <sa...@edev08.esgyn.local>
Authored: Thu May 4 05:52:10 2017 +0000
Committer: Sandhya Sundaresan <sa...@edev08.esgyn.local>
Committed: Thu May 4 05:52:10 2017 +0000
----------------------------------------------------------------------
core/sql/regress/executor/EXPECTED130 | 220 ++++++++++++++---------------
core/sql/regress/executor/TEST130 | 2 +-
2 files changed, 106 insertions(+), 116 deletions(-)
----------------------------------------------------------------------
[3/8] incubator-trafodion git commit: Changes to move all Trafodion
created hdfs files under /user/trafodion
Posted by sa...@apache.org.
Changes to move all Trafodion created hdfs files under /user/trafodion
Project: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/commit/f094aa0d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/tree/f094aa0d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/diff/f094aa0d
Branch: refs/heads/master
Commit: f094aa0d144046118253b4642a038b5ea5446ae4
Parents: b36003c
Author: Sandhya Sundaresan <sa...@edev08.esgyn.local>
Authored: Tue May 2 17:16:45 2017 +0000
Committer: Sandhya Sundaresan <sa...@edev08.esgyn.local>
Committed: Tue May 2 17:16:45 2017 +0000
----------------------------------------------------------------------
.../run_full_trafodion_backup.sh | 2 +-
.../sql/scripts/install_hadoop_regr_test_env | 102 +--
core/sqf/sql/scripts/install_local_hadoop | 15 +-
core/sql/optimizer/OptimizerSimulator.cpp | 2 +-
core/sql/regress/executor/TEST130 | 86 +--
core/sql/regress/hive/EXPECTED003 | 6 +-
core/sql/regress/hive/EXPECTED005 | 104 +--
core/sql/regress/hive/EXPECTED009 | 22 +-
core/sql/regress/hive/EXPECTED018 | 717 ++++++++++---------
core/sql/regress/hive/FILTER005 | 4 +-
core/sql/regress/hive/TEST003 | 52 +-
.../hive/TEST003_create_hive_tables.hive | 24 +-
core/sql/regress/hive/TEST005 | 48 +-
core/sql/regress/hive/TEST005_a.hive.sql | 16 +-
core/sql/regress/hive/TEST009 | 6 +-
core/sql/regress/hive/TEST009_a.hive.sql | 4 +-
core/sql/regress/hive/TEST009_b.hive.sql | 2 +-
core/sql/regress/hive/TEST018 | 148 ++--
.../hive/TEST018_create_hive_tables.hive | 14 +-
core/sql/sqlcomp/nadefaults.cpp | 12 +-
20 files changed, 693 insertions(+), 693 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/f094aa0d/core/sqf/hbase_utilities/backup_and_restore/run_full_trafodion_backup.sh
----------------------------------------------------------------------
diff --git a/core/sqf/hbase_utilities/backup_and_restore/run_full_trafodion_backup.sh b/core/sqf/hbase_utilities/backup_and_restore/run_full_trafodion_backup.sh
index 4ed4e7a..d36f665 100755
--- a/core/sqf/hbase_utilities/backup_and_restore/run_full_trafodion_backup.sh
+++ b/core/sqf/hbase_utilities/backup_and_restore/run_full_trafodion_backup.sh
@@ -139,7 +139,7 @@ echo "hdfs_uri: ${hdfs_uri}"
echo "hdfs_backup_location:${hdfs_backup_location}"
# if hdfs backup location is empty generate one
if [[ -z "$hdfs_backup_location" ]] ; then
- new_path=${hdfs_uri}/trafodion_backups/backup_${date_str}
+ new_path=${hdfs_uri}/user/trafodion/trafodion_backups/backup_${date_str}
confirm_choice "Would you like to use this path as the backup folder: ${new_path} ?"
if [[ $? -ne 0 ]]; then
echo "***[ERROR]: New path ${new_path} could not be validated." | tee -a ${log_file}
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/f094aa0d/core/sqf/sql/scripts/install_hadoop_regr_test_env
----------------------------------------------------------------------
diff --git a/core/sqf/sql/scripts/install_hadoop_regr_test_env b/core/sqf/sql/scripts/install_hadoop_regr_test_env
index 7625dfe..2375560 100755
--- a/core/sqf/sql/scripts/install_hadoop_regr_test_env
+++ b/core/sqf/sql/scripts/install_hadoop_regr_test_env
@@ -212,31 +212,31 @@ fi
./dsdgen -force $FORCE -dir $MY_TPCDS_DATA_DIR -scale $SCALE -table reason >>${MY_LOG_FILE} 2>&1
./dsdgen -force $FORCE -dir $MY_TPCDS_DATA_DIR -scale $SCALE -table ship_mode >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/date_dim >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/time_dim >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/item >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/customer >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/customer_demographics >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/household_demographics >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/customer_address >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/store >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/promotion >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/store_sales >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/ship_mode >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/reason >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/income_band >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/warehouse >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/web_page >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/web_site >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/catalog_page >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/call_center >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/inventory >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/web_returns >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/web_sales >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/catalog_returns >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/catalog_sales >>${MY_LOG_FILE} 2>&1
- $MY_HDFS_CMD dfs -mkdir -p /hive/tpcds/store_returns >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/date_dim >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/time_dim >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/item >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/customer >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/customer_demographics >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/household_demographics >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/customer_address >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/store >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/promotion >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/store_sales >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/ship_mode >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/reason >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/income_band >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/warehouse >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/web_page >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/web_site >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/catalog_page >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/call_center >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/inventory >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/web_returns >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/web_sales >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/catalog_returns >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/catalog_sales >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -mkdir -p /user/trafodion/hive/tpcds/store_returns >>${MY_LOG_FILE} 2>&1
cd $MY_TPCDS_DATA_DIR
@@ -256,10 +256,10 @@ fi
echo "Copying generated data to HDFS..."
for t in date_dim time_dim item customer customer_demographics household_demographics customer_address store promotion store_sales store_returns catalog_sales catalog_returns web_sales web_returns inventory call_center catalog_page web_site web_page warehouse income_band reason reason
do
- $MY_HDFS_CMD dfs -put ${t}.dat /hive/tpcds/${t} >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -put ${t}.dat /user/trafodion/hive/tpcds/${t} >>${MY_LOG_FILE} 2>&1
done
- $MY_HDFS_CMD dfs -ls -R /hive/tpcds/*/*.dat >>${MY_LOG_FILE} 2>&1
+ $MY_HDFS_CMD dfs -ls -R /user/trafodion/hive/tpcds/*/*.dat >>${MY_LOG_FILE} 2>&1
echo "Creating tables in Hive..." | tee -a ${MY_LOG_FILE}
$MY_HIVE_CMD <<EOF >>${MY_LOG_FILE} 2>&1
@@ -292,7 +292,7 @@ create external table store_sales
ss_net_profit float
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/store_sales';
+location '/user/trafodion/hive/tpcds/store_sales';
create external table customer_demographics
(
@@ -307,7 +307,7 @@ create external table customer_demographics
cd_dep_college_count int
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/customer_demographics';
+location '/user/trafodion/hive/tpcds/customer_demographics';
create external table date_dim
(
@@ -341,7 +341,7 @@ create external table date_dim
d_current_year string
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/date_dim';
+location '/user/trafodion/hive/tpcds/date_dim';
create external table time_dim
(
@@ -357,7 +357,7 @@ create external table time_dim
t_meal_time string
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/time_dim';
+location '/user/trafodion/hive/tpcds/time_dim';
create external table item
(
@@ -385,7 +385,7 @@ create external table item
i_product_name string
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/item';
+location '/user/trafodion/hive/tpcds/item';
create external table store
(
@@ -420,7 +420,7 @@ create external table store
s_tax_precentage float
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/store';
+location '/user/trafodion/hive/tpcds/store';
create external table customer
(
@@ -444,7 +444,7 @@ create external table customer
c_last_review_date string
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/customer';
+location '/user/trafodion/hive/tpcds/customer';
create external table promotion
(
@@ -469,7 +469,7 @@ create external table promotion
p_discount_active string
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/promotion';
+location '/user/trafodion/hive/tpcds/promotion';
create external table household_demographics
(
@@ -480,7 +480,7 @@ create external table household_demographics
hd_vehicle_count int
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/household_demographics';
+location '/user/trafodion/hive/tpcds/household_demographics';
create external table customer_address
(
@@ -499,7 +499,7 @@ create external table customer_address
ca_location_type string
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/customer_address';
+location '/user/trafodion/hive/tpcds/customer_address';
create table store_orc stored as orc as select * from store;
@@ -527,7 +527,7 @@ create external table store_returns
sr_net_loss float
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/store_returns';
+location '/user/trafodion/hive/tpcds/store_returns';
create external table catalog_sales
@@ -568,7 +568,7 @@ create external table catalog_sales
cs_net_profit float
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/catalog_sales';
+location '/user/trafodion/hive/tpcds/catalog_sales';
create external table catalog_returns
@@ -602,7 +602,7 @@ create external table catalog_returns
cr_net_loss float
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/catalog_returns';
+location '/user/trafodion/hive/tpcds/catalog_returns';
create external table web_sales
(
@@ -642,7 +642,7 @@ create external table web_sales
ws_net_profit float
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/web_sales';
+location '/user/trafodion/hive/tpcds/web_sales';
create external table web_returns
(
@@ -672,7 +672,7 @@ create external table web_returns
wr_net_loss float
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/web_returns';
+location '/user/trafodion/hive/tpcds/web_returns';
create external table inventory
(
@@ -682,7 +682,7 @@ create external table inventory
inv_quantity_on_hand int
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/inventory';
+location '/user/trafodion/hive/tpcds/inventory';
create external table call_center
(
@@ -719,7 +719,7 @@ create external table call_center
cc_tax_percentage float
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/call_center';
+location '/user/trafodion/hive/tpcds/call_center';
create external table catalog_page
(
@@ -734,7 +734,7 @@ create external table catalog_page
cp_type string
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/catalog_page';
+location '/user/trafodion/hive/tpcds/catalog_page';
create external table web_site
(
@@ -766,7 +766,7 @@ create external table web_site
web_tax_percentage float
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/web_site';
+location '/user/trafodion/hive/tpcds/web_site';
create external table Web_page
(
@@ -786,7 +786,7 @@ create external table Web_page
wp_max_ad_count int
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/web_page';
+location '/user/trafodion/hive/tpcds/web_page';
create external table warehouse
(
@@ -806,7 +806,7 @@ create external table warehouse
w_gmt_offset float
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/warehouse';
+location '/user/trafodion/hive/tpcds/warehouse';
create external table income_band
(
@@ -815,7 +815,7 @@ create external table income_band
ib_upper_bound int
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/income_band';
+location '/user/trafodion/hive/tpcds/income_band';
create external table reason
(
@@ -824,7 +824,7 @@ create external table reason
r_reason_desc string
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/reason';
+location '/user/trafodion/hive/tpcds/reason';
create external table ship_mode
(
@@ -836,7 +836,7 @@ create external table ship_mode
sm_contract string
)
row format delimited fields terminated by '|'
-location '/hive/tpcds/ship_mode';
+location '/user/trafodion/hive/tpcds/ship_mode';
quit;
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/f094aa0d/core/sqf/sql/scripts/install_local_hadoop
----------------------------------------------------------------------
diff --git a/core/sqf/sql/scripts/install_local_hadoop b/core/sqf/sql/scripts/install_local_hadoop
index a96bf31..0a42a60 100755
--- a/core/sqf/sql/scripts/install_local_hadoop
+++ b/core/sqf/sql/scripts/install_local_hadoop
@@ -1244,17 +1244,16 @@ EOF
echo "Creating HDFS directories" 2>&1 | tee -a ${MY_LOG_FILE}
bin/hdfs dfs -mkdir /tmp >>${MY_LOG_FILE} 2>&1
- bin/hdfs dfs -mkdir /user >>${MY_LOG_FILE} 2>&1
- bin/hdfs dfs -mkdir /user/$USER >>${MY_LOG_FILE} 2>&1
- bin/hdfs dfs -mkdir /user/hive >>${MY_LOG_FILE} 2>&1
+ bin/hdfs dfs -mkdir /user >>${MY_LOG_FILE} 2>&1
bin/hdfs dfs -mkdir /user/trafodion >>${MY_LOG_FILE} 2>&1
- bin/hdfs dfs -mkdir /bulkload >>${MY_LOG_FILE} 2>&1
+ bin/hdfs dfs -mkdir /user/trafodion/$USER >>${MY_LOG_FILE} 2>&1
+ bin/hdfs dfs -mkdir /user/trafodion/hive >>${MY_LOG_FILE} 2>&1
bin/hdfs dfs -mkdir /user/trafodion/bulkload >>${MY_LOG_FILE} 2>&1
- bin/hdfs dfs -mkdir /user/hive/warehouse >>${MY_LOG_FILE} 2>&1
- bin/hdfs dfs -mkdir /hive >>${MY_LOG_FILE} 2>&1
+ bin/hdfs dfs -mkdir /user/trafodion/hive/warehouse >>${MY_LOG_FILE} 2>&1
+ bin/hdfs dfs -mkdir /user/trafodion/hive >>${MY_LOG_FILE} 2>&1
bin/hdfs dfs -chmod g+w /tmp >>${MY_LOG_FILE} 2>&1
- bin/hdfs dfs -chmod g+w /user/hive/warehouse >>${MY_LOG_FILE} 2>&1
- bin/hdfs dfs -chmod g+w /bulkload >>${MY_LOG_FILE} 2>&1
+ bin/hdfs dfs -chmod g+w /user/trafodion/hive/warehouse >>${MY_LOG_FILE} 2>&1
+ bin/hdfs dfs -chmod g+w /user/trafodion/bulkload >>${MY_LOG_FILE} 2>&1
bin/hdfs dfs -chmod g+w /user/trafodion/bulkload >>${MY_LOG_FILE} 2>&1
bin/hadoop fs -ls -R / 2>&1 | tee -a ${MY_LOG_FILE}
echo "Done: Creating HDFS directories" 2>&1 | tee -a ${MY_LOG_FILE}
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/f094aa0d/core/sql/optimizer/OptimizerSimulator.cpp
----------------------------------------------------------------------
diff --git a/core/sql/optimizer/OptimizerSimulator.cpp b/core/sql/optimizer/OptimizerSimulator.cpp
index e77f2ae..6767f84 100644
--- a/core/sql/optimizer/OptimizerSimulator.cpp
+++ b/core/sql/optimizer/OptimizerSimulator.cpp
@@ -65,7 +65,7 @@ extern THREAD_P NAClusterInfo *gpClusterInfo;
extern const WordAsBits SingleBitArray[];
//the dir path should start from /user/trafodion/bulkload
-#define UNLOAD_HDFS_DIR "/bulkload/osim_capture"
+#define UNLOAD_HDFS_DIR "/user/trafodion/bulkload/osim_capture"
static ULng32 hashFunc_int(const Int32& Int)
{
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/f094aa0d/core/sql/regress/executor/TEST130
----------------------------------------------------------------------
diff --git a/core/sql/regress/executor/TEST130 b/core/sql/regress/executor/TEST130
index 0a64476..bdcd6a1 100755
--- a/core/sql/regress/executor/TEST130
+++ b/core/sql/regress/executor/TEST130
@@ -315,9 +315,9 @@ drop table tlob130bin2;
sh rm TMP130;
sh rm tlob130txt2;
sh rm tlob130_txt2.txt;
-sh regrhadoop.ksh fs -rm /lobs/tlob130_deep.jpg;
-sh regrhadoop.ksh fs -rm /lobs/tlob130_anoush.jpg;
-sh regrhadoop.ksh fs -rm /lobs/tlob130_txt2.txt;
+sh regrhadoop.ksh fs -rm /user/trafodion/lobs/tlob130_deep.jpg;
+sh regrhadoop.ksh fs -rm /user/trafodion/lobs/tlob130_anoush.jpg;
+sh regrhadoop.ksh fs -rm /user/trafodion/lobs/tlob130_txt2.txt;
?section lob_hdfs_file
@@ -325,35 +325,35 @@ log LOG130;
create table tlob130txt2 (c1 int not null, c2 clob, primary key (c1));
create table tlob130bin2 (c1 int not null, c2 blob, primary key (c1));
-sh regrhadoop.ksh fs -copyFromLocal lob_input_a1.txt /lobs/lob_input_a1.txt;
-sh regrhadoop.ksh fs -copyFromLocal lob_input_b1.txt /lobs/lob_input_b1.txt;
-sh regrhadoop.ksh fs -copyFromLocal lob_input_c1.txt /lobs/lob_input_c1.txt;
-sh regrhadoop.ksh fs -copyFromLocal lob_input_d1.txt /lobs/lob_input_d1.txt;
-sh regrhadoop.ksh fs -copyFromLocal lob_input_e1.txt /lobs/lob_input_e1.txt;
-sh regrhadoop.ksh fs -copyFromLocal deep.jpg /lobs/deep.jpg;
-sh regrhadoop.ksh fs -copyFromLocal anoush.jpg /lobs/anoush.jpg;
+sh regrhadoop.ksh fs -copyFromLocal lob_input_a1.txt /user/trafodion/lobs/lob_input_a1.txt;
+sh regrhadoop.ksh fs -copyFromLocal lob_input_b1.txt /user/trafodion/lobs/lob_input_b1.txt;
+sh regrhadoop.ksh fs -copyFromLocal lob_input_c1.txt /user/trafodion/lobs/lob_input_c1.txt;
+sh regrhadoop.ksh fs -copyFromLocal lob_input_d1.txt /user/trafodion/lobs/lob_input_d1.txt;
+sh regrhadoop.ksh fs -copyFromLocal lob_input_e1.txt /user/trafodion/lobs/lob_input_e1.txt;
+sh regrhadoop.ksh fs -copyFromLocal deep.jpg /user/trafodion/lobs/deep.jpg;
+sh regrhadoop.ksh fs -copyFromLocal anoush.jpg /user/trafodion/lobs/anoush.jpg;
-- the next one is a really long file name intended to test error message 8557
-sh regrhadoop.ksh fs -copyFromLocal lob_input_a1.txt /lobs/reallyLongDirectoryName0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789/lob_input_a1012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789.txt;
+sh regrhadoop.ksh fs -copyFromLocal lob_input_a1.txt /user/trafodion/lobs/reallyLongDirectoryName0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789/lob_input_a1012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789.txt;
sh sleep(20);
-insert into tlob130txt2 values (1, filetolob('hdfs:///lobs/lob_input_a1.txt'));
+insert into tlob130txt2 values (1, filetolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'));
-- second line
-insert into tlob130txt2 values (2, filetolob('hdfs:///lobs/lob_input_b1.txt'));
+insert into tlob130txt2 values (2, filetolob('hdfs:///user/trafodion/lobs/lob_input_b1.txt'));
-- third line
-insert into tlob130txt2 values (3, filetolob('hdfs:///lobs/lob_input_c1.txt'));
+insert into tlob130txt2 values (3, filetolob('hdfs:///user/trafodion/lobs/lob_input_c1.txt'));
select lobtostring(c2, 40 ) from tlob130txt2;
--updates
--should update with full poem
-update tlob130txt2 set c2=filetolob('hdfs:///lobs/lob_input_d1.txt', append) where c1 = 3;
+update tlob130txt2 set c2=filetolob('hdfs:///user/trafodion/lobs/lob_input_d1.txt', append) where c1 = 3;
select lobtostring(c2, 200 ) from tlob130txt2;
-- should see wrong text in the last few lines
-update tlob130txt2 set c2=filetolob('hdfs:///lobs/lob_input_e1.txt') where c1 =3 ;
+update tlob130txt2 set c2=filetolob('hdfs:///user/trafodion/lobs/lob_input_e1.txt') where c1 =3 ;
select lobtostring(c2, 200 ) from tlob130txt2;
--delete
@@ -372,7 +372,7 @@ log;
log LOG130;
sh rm t130_extract_command;
-sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'hdfs:\/\/\/lobs\/tlob130_txt2.txt');/g" >> t130_extract_command;
+sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'hdfs:\/\/\/user\/trafodion\/lobs\/tlob130_txt2.txt');/g" >> t130_extract_command;
obey t130_extract_command;
@@ -388,7 +388,7 @@ log;
log LOG130;
sh rm t130_extract_command;
-sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'hdfs:\/\/\/lobs\/tlob130_deep.jpg');/g" >> t130_extract_command;
+sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'hdfs:\/\/\/user\/trafodion\/lobs\/tlob130_deep.jpg');/g" >> t130_extract_command;
obey t130_extract_command;
update tlob130bin2 set c2=filetolob('anoush.jpg') ;
@@ -402,7 +402,7 @@ log;
log LOG130;
sh rm t130_extract_command;
-sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'hdfs:\/\/\/lobs\/tlob130_anoush.jpg');/g" >> t130_extract_command;
+sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'hdfs:\/\/\/user\/trafodion\/lobs\/tlob130_anoush.jpg');/g" >> t130_extract_command;
obey t130_extract_command;
@@ -420,7 +420,7 @@ drop table tlob130bin3;
drop table tlob130bt;
sh rm tlob130_deep2.jpg;
sh rm tlob130_txt1.txt;
-sh regrhadoop.ksh fs -rm /lobs/tlob130_anoush2.jpg;
+sh regrhadoop.ksh fs -rm /user/trafodion/lobs/tlob130_anoush2.jpg;
sh rm tlob130_anoush3.jpg;
@@ -453,7 +453,7 @@ log;
log LOG130;
sh rm t130_extract_command;
-sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'hdfs:\/\/\/lobs\/tlob130_anoush2.jpg');/g" >> t130_extract_command;
+sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'hdfs:\/\/\/user\/trafodion\/lobs\/tlob130_anoush2.jpg');/g" >> t130_extract_command;
obey t130_extract_command;
-- combination blob and clob columns
@@ -524,22 +524,22 @@ select * from table(lob stats(tlob130gt));
?section lob_external
--test external lobs
create table tlob130ext (c1 int not null, c2 blob, c3 clob, c4 blob storage 'external', primary key (c1));
-insert into tlob130ext values(1, stringtolob('first lob'), filetolob('hdfs:///lobs/lob_input_a1.txt'),externaltolob('hdfs:///lobs/lob_input_a1.txt'));
-insert into tlob130ext values(2, stringtolob('second lob'), filetolob('hdfs:///lobs/lob_input_b1.txt'),externaltolob('hdfs:///lobs/lob_input_b1.txt'));
-insert into tlob130ext values(3, stringtolob('third lob'), filetolob('hdfs:///lobs/lob_input_c1.txt'),externaltolob('hdfs:///lobs/lob_input_c1.txt'));
+insert into tlob130ext values(1, stringtolob('first lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'));
+insert into tlob130ext values(2, stringtolob('second lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_b1.txt'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_b1.txt'));
+insert into tlob130ext values(3, stringtolob('third lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_c1.txt'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_c1.txt'));
--negative cases
-insert into tlob130ext values(2, externaltolob('first lob'), filetolob('hdfs:///lobs/lob_input_a1.txt'),externaltolob('hdfs:///lobs/lob_input_a1.txt'));
-insert into tlob130ext values(3, stringtolob('first lob'), filetolob('hdfs:///lobs/lob_input_a1.txt'),filetolob('hdfs:///lobs/lob_input_a1.txt'));
-update tlob130ext set c4=stringtolob('hdfs:///lobs/lob_input_a1.txt', append) where c1=1;
-update tlob130ext set c4=externaltolob('hdfs:///lobs/lob_input_a1.txt', append) where c1=1;
-update tlob130ext set c3=externaltolob('hdfs:///lobs/lob_input_b1.txt') where c1=1;
+insert into tlob130ext values(2, externaltolob('first lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'));
+insert into tlob130ext values(3, stringtolob('first lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'),filetolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'));
+update tlob130ext set c4=stringtolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt', append) where c1=1;
+update tlob130ext set c4=externaltolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt', append) where c1=1;
+update tlob130ext set c3=externaltolob('hdfs:///user/trafodion/lobs/lob_input_b1.txt') where c1=1;
update tlob130ext set c4=stringtolob('cannot allow this') where c1=1;
delete from tlob130ext where c1=1;
-insert into tlob130ext values(1, stringtolob('first lob'),externaltolob('hdfs:///lobs/lob_input_a1.txt'),externaltolob('hdfs:///lobs/lob_input_a1.txt'));
-insert into tlob130ext values(1, stringtolob('first lob'), filetolob('hdfs:///lobs/lob_input_a1.txt'),externaltolob('hdfs:///lobs/lob_input_a1.txt'));
+insert into tlob130ext values(1, stringtolob('first lob'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'));
+insert into tlob130ext values(1, stringtolob('first lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'));
-- the next one should see error 8557
-insert into tlob130ext values(1, stringtolob('first lob'), filetolob('hdfs:///lobs/lob_input_a1.txt'),
-externaltolob('hdfs:///lobs/reallyLongDirectoryName0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789/lob_input_a1012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789.txt'));
+insert into tlob130ext values(1, stringtolob('first lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'),
+externaltolob('hdfs:///user/trafodion/lobs/reallyLongDirectoryName0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789/lob_input_a1012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789.txt'));
select lobtostring(c2,50),lobtostring(c3,50),lobtostring(c4,50) from tlob130ext;
update tlob130ext set c3=stringtolob('can allow this') where c1=1;
@@ -547,11 +547,11 @@ select lobtostring(c2,50),lobtostring(c3,50),lobtostring(c4,50) from tlob130ext;
-update tlob130ext set c4=externaltolob('hdfs:///lobs/lob_input_d1.txt') where c1=2;
+update tlob130ext set c4=externaltolob('hdfs:///user/trafodion/lobs/lob_input_d1.txt') where c1=2;
select lobtostring(c2,50),lobtostring(c3,50),lobtostring(c4,50) from tlob130ext;
-update tlob130ext set c2=filetolob('hdfs:///lobs/lob_input_b1.txt') where c1=2;
+update tlob130ext set c2=filetolob('hdfs:///user/trafodion/lobs/lob_input_b1.txt') where c1=2;
select lobtostring(c2,50),lobtostring(c3,50),lobtostring(c3,50) from tlob130ext;
get lob stats for table tlob130ext;
select * from table(lob stats(tlob130ext));
@@ -585,7 +585,7 @@ select c2 from tlob130ext;
-- following should return error since only external lobs will be allowed
update tlob130ext set c4=stringtolob('gggg');
-- following should work.
-update tlob130ext set c4=externaltolob('hdfs:///lobs/lob_input_b1.txt');
+update tlob130ext set c4=externaltolob('hdfs:///user/trafodion/lobs/lob_input_b1.txt');
delete from tlob130ext;
-- test to ensure all lob dependent tables and schemas containing lob tables
@@ -611,13 +611,13 @@ set schema trafodion.lob130;
?section lob_general_cleanup
log;
sh rm t130_*;
-sh regrhadoop.ksh fs -rm /lobs/tlob130*
-sh regrhadoop.ksh fs -rm /lobs/lobinput_a1.txt
-sh regrhadoop.ksh fs -rm /lobs/lobinput_b1.txt
-sh regrhadoop.ksh fs -rm /lobs/lobinput_c1.txt
-sh regrhadoop.ksh fs -rm /lobs/lobinput_d1.txt
-sh regrhadoop.ksh fs -rm /lobs/lobinput_e1.txt
-sh regrhadoop.ksh fs -rm /lobs/reallyLongDirectoryName0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789/lob_input_a1012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789.txt;
+sh regrhadoop.ksh fs -rm /user/trafodion/lobs/tlob130*
+sh regrhadoop.ksh fs -rm /user/trafodion/lobs/lobinput_a1.txt
+sh regrhadoop.ksh fs -rm /user/trafodion/lobs/lobinput_b1.txt
+sh regrhadoop.ksh fs -rm /user/trafodion/lobs/lobinput_c1.txt
+sh regrhadoop.ksh fs -rm /user/trafodion/lobs/lobinput_d1.txt
+sh regrhadoop.ksh fs -rm /user/trafodion/lobs/lobinput_e1.txt
+sh regrhadoop.ksh fs -rm /user/trafodion/lobs/reallyLongDirectoryName0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789/lob_input_a1012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789.txt;
sh rm lobinput_*;
sh rm deep.jpg;
sh rm anoush.jpg;
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/f094aa0d/core/sql/regress/hive/EXPECTED003
----------------------------------------------------------------------
diff --git a/core/sql/regress/hive/EXPECTED003 b/core/sql/regress/hive/EXPECTED003
index 456b150..dd899c8 100644
--- a/core/sql/regress/hive/EXPECTED003
+++ b/core/sql/regress/hive/EXPECTED003
@@ -191,7 +191,7 @@ P_PROMO_SK P_PROMO_ID P_START_DATE_SK P_END_DATE_SK P_ITEM_S
*** WARNING[8597] Statement was automatically retried 1 time(s). Delay before each retry was 0 seconds. See next entry for the error that caused this retry.
-*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1491850476, failedModTS = 1491850478, failedLoc = hdfs://localhost:30200/user/hive/exttables/ins_customer_address
+*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1493660577, failedModTS = 1493660579, failedLoc = hdfs://localhost:25600/user/trafodion/hive/exttables/ins_customer_address
(EXPR)
--------------------
@@ -587,7 +587,7 @@ test lp bug # 1355477
--- 12 row(s) selected.
>>
>>-- hadoop ls should return 2 files
->>-- sh regrhadoop.ksh fs -ls /user/hive/exttables/ins_store_sales_summary/* | grep ins_store_sales_summary | wc -l | tee -a LOG003;
+>>-- sh regrhadoop.ksh fs -ls /user/trafodion/hive/exttables/ins_store_sales_summary/* | grep ins_store_sales_summary | wc -l | tee -a LOG003;
>>log;
2
1c1
@@ -651,7 +651,7 @@ LC RC OP OPERATOR OPT DESCRIPTION CARD
---
> *** WARNING[8597] Statement was automatically retried 1 time(s). Delay before each retry was 0 seconds. See next entry for the error that caused this retry.
>
-> *** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1491850590, failedModTS = 1491850614, failedLoc = hdfs://localhost:30200/user/hive/exttables/ins_store_sales_summary
+> *** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1493660691, failedModTS = 1493660717, failedLoc = hdfs://localhost:25600/user/trafodion/hive/exttables/ins_store_sales_summary
>
> SS_SOLD_DATE_SK SS_STORE_SK SS_QUANTITY
> --------------- ----------- -----------
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/f094aa0d/core/sql/regress/hive/EXPECTED005
----------------------------------------------------------------------
diff --git a/core/sql/regress/hive/EXPECTED005 b/core/sql/regress/hive/EXPECTED005
index 16662ec..909b448 100644
--- a/core/sql/regress/hive/EXPECTED005
+++ b/core/sql/regress/hive/EXPECTED005
@@ -72,7 +72,7 @@
*** WARNING[8597] Statement was automatically retried 1 time(s). Delay before each retry was 0 seconds. See next entry for the error that caused this retry.
-*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1491514410, failedModTS = 1491514487, failedLoc = hdfs://localhost:30200/user/hive/exttables/customer_ddl
+*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1493661048, failedModTS = 1493661158, failedLoc = hdfs://localhost:25600/user/trafodion/hive/exttables/customer_ddl
C_PREFERRED_CUST_FLAG (EXPR)
------------------------- --------------------
@@ -108,7 +108,7 @@ Y 9525
*** WARNING[8597] Statement was automatically retried 1 time(s). Delay before each retry was 0 seconds. See next entry for the error that caused this retry.
-*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1491514410, failedModTS = 1491514487, failedLoc = hdfs://localhost:30200/user/hive/exttables/customer_ddl
+*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1493661048, failedModTS = 1493661158, failedLoc = hdfs://localhost:25600/user/trafodion/hive/exttables/customer_ddl
C_PREFERRED_CUST_FLAG (EXPR)
------------------------- --------------------
@@ -177,7 +177,7 @@ Y 9525
*** WARNING[8597] Statement was automatically retried 1 time(s). Delay before each retry was 0 seconds. See next entry for the error that caused this retry.
-*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1491514491, failedModTS = 1491514501, failedLoc = hdfs://localhost:30200/user/hive/warehouse/newtable
+*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1493661163, failedModTS = 1493661174, failedLoc = hdfs://localhost:25600/user/hive/warehouse/newtable
A
-------------------------
@@ -202,7 +202,7 @@ xyz
>>-- add a second partition to customer_bp
>>sh regrhive.ksh -v -f $REGRTSTDIR/TEST005_c.hive.sql;
>>-- add more files to customer_ddl
->>sh regrhadoop.ksh dfs -cp /user/hive/exttables/customer_temp/* /user/hive/exttables/customer_ddl;
+>>sh regrhadoop.ksh dfs -cp /user/trafodion/hive/exttables/customer_temp/* /user/trafodion/hive/exttables/customer_ddl;
>>
>>-- no query cache hit, but NATable cache hit
>>prepare s3 from
@@ -227,7 +227,7 @@ xyz
*** WARNING[8597] Statement was automatically retried 1 time(s). Delay before each retry was 0 seconds. See next entry for the error that caused this retry.
-*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1491514487, failedModTS = 1491514524, failedLoc = hdfs://localhost:30200/user/hive/exttables/customer_ddl
+*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1493661158, failedModTS = 1493661205, failedLoc = hdfs://localhost:25600/user/trafodion/hive/exttables/customer_ddl
C_PREFERRED_CUST_FLAG (EXPR)
------------------------- --------------------
@@ -242,7 +242,7 @@ Y 18984
*** WARNING[8597] Statement was automatically retried 1 time(s). Delay before each retry was 0 seconds. See next entry for the error that caused this retry.
-*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1491514487, failedModTS = 1491514524, failedLoc = hdfs://localhost:30200/user/hive/exttables/customer_ddl
+*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1493661158, failedModTS = 1493661205, failedLoc = hdfs://localhost:25600/user/trafodion/hive/exttables/customer_ddl
C_PREFERRED_CUST_FLAG (EXPR)
------------------------- --------------------
@@ -298,7 +298,7 @@ Y 18984
*** WARNING[8597] Statement was automatically retried 1 time(s). Delay before each retry was 0 seconds. See next entry for the error that caused this retry.
-*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1491514521, failedModTS = 1491514532, failedLoc = hdfs://localhost:30200/user/hive/warehouse/newtable
+*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1493661199, failedModTS = 1493661218, failedLoc = hdfs://localhost:25600/user/hive/warehouse/newtable
A B
----------- -------------------------
@@ -617,18 +617,18 @@ C1 C2 C3 C4 C
--- 1 row(s) selected.
>>load with continue on error into trafodion.seabase.traf_tbl_bad select * from tbl_bad;
Task: LOAD Status: Started Object: TRAFODION.SEABASE.TRAF_TBL_BAD
-Task: CLEANUP Status: Started Time: 2017-04-06 21:36:49.645
-Task: CLEANUP Status: Ended Time: 2017-04-06 21:36:49.665
-Task: CLEANUP Status: Ended Elapsed Time: 00:00:00.020
-Task: LOADING DATA Status: Started Time: 2017-04-06 21:36:49.665
+Task: CLEANUP Status: Started Time: 2017-05-01 17:54:50.429
+Task: CLEANUP Status: Ended Time: 2017-05-01 17:54:50.445
+Task: CLEANUP Status: Ended Elapsed Time: 00:00:00.015
+Task: LOADING DATA Status: Started Time: 2017-05-01 17:54:50.445
Rows Processed: 8
Error Rows: 5
-Task: LOADING DATA Status: Ended Time: 2017-04-06 21:36:50.106
-Task: LOADING DATA Status: Ended Elapsed Time: 00:00:00.441
-Task: COMPLETION Status: Started Time: 2017-04-06 21:36:50.107
+Task: LOADING DATA Status: Ended Time: 2017-05-01 17:54:50.905
+Task: LOADING DATA Status: Ended Elapsed Time: 00:00:00.460
+Task: COMPLETION Status: Started Time: 2017-05-01 17:54:50.905
Rows Loaded: 3
-Task: COMPLETION Status: Ended Time: 2017-04-06 21:36:51.349
-Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.928
+Task: COMPLETION Status: Ended Time: 2017-05-01 17:54:51.575
+Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.670
--- 3 row(s) loaded.
>>select count(*) from trafodion.seabase.traf_tbl_bad;
@@ -644,19 +644,19 @@ Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.928
--- 3 row(s) deleted.
>>load with log error rows into trafodion.seabase.traf_tbl_bad select * from tbl_bad;
Task: LOAD Status: Started Object: TRAFODION.SEABASE.TRAF_TBL_BAD
-Task: CLEANUP Status: Started Time: 2017-04-06 21:36:52.131
-Task: CLEANUP Status: Ended Time: 2017-04-06 21:36:52.143
-Task: CLEANUP Status: Ended Elapsed Time: 00:00:00.012
- Logging Location: /bulkload/logs/ERR_TRAFODION.SEABASE.TRAF_TBL_BAD_20170406_213652
-Task: LOADING DATA Status: Started Time: 2017-04-06 21:36:52.143
+Task: CLEANUP Status: Started Time: 2017-05-01 17:54:52.708
+Task: CLEANUP Status: Ended Time: 2017-05-01 17:54:52.726
+Task: CLEANUP Status: Ended Elapsed Time: 00:00:00.019
+ Logging Location: /user/trafodion/bulkload/logs/ERR_TRAFODION.SEABASE.TRAF_TBL_BAD_20170501_175452
+Task: LOADING DATA Status: Started Time: 2017-05-01 17:54:52.726
Rows Processed: 8
Error Rows: 5
-Task: LOADING DATA Status: Ended Time: 2017-04-06 21:36:52.719
-Task: LOADING DATA Status: Ended Elapsed Time: 00:00:00.575
-Task: COMPLETION Status: Started Time: 2017-04-06 21:36:52.719
+Task: LOADING DATA Status: Ended Time: 2017-05-01 17:54:53.423
+Task: LOADING DATA Status: Ended Elapsed Time: 00:00:00.697
+Task: COMPLETION Status: Started Time: 2017-05-01 17:54:53.423
Rows Loaded: 3
-Task: COMPLETION Status: Ended Time: 2017-04-06 21:36:53.345
-Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.627
+Task: COMPLETION Status: Ended Time: 2017-05-01 17:54:55.924
+Task: COMPLETION Status: Ended Elapsed Time: 00:00:02.501
--- 3 row(s) loaded.
>>select count(*) from trafodion.seabase.traf_tbl_bad;
@@ -667,21 +667,21 @@ Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.627
3
--- 1 row(s) selected.
->>load with log error rows to '/bulkload/logs/TEST005' into trafodion.seabase.traf_tbl_bad select * from tbl_bad;
+>>load with log error rows to '/user/trafodion/bulkload/logs/TEST005' into trafodion.seabase.traf_tbl_bad select * from tbl_bad;
Task: LOAD Status: Started Object: TRAFODION.SEABASE.TRAF_TBL_BAD
-Task: CLEANUP Status: Started Time: 2017-04-06 21:36:54.360
-Task: CLEANUP Status: Ended Time: 2017-04-06 21:36:54.374
-Task: CLEANUP Status: Ended Elapsed Time: 00:00:00.014
- Logging Location: /bulkload/logs/TEST005/ERR_TRAFODION.SEABASE.TRAF_TBL_BAD_20170406_213654
-Task: LOADING DATA Status: Started Time: 2017-04-06 21:36:54.374
+Task: CLEANUP Status: Started Time: 2017-05-01 17:54:56.974
+Task: CLEANUP Status: Ended Time: 2017-05-01 17:54:56.990
+Task: CLEANUP Status: Ended Elapsed Time: 00:00:00.015
+ Logging Location: /user/trafodion/bulkload/logs/TEST005/ERR_TRAFODION.SEABASE.TRAF_TBL_BAD_20170501_175456
+Task: LOADING DATA Status: Started Time: 2017-05-01 17:54:56.990
Rows Processed: 8
Error Rows: 5
-Task: LOADING DATA Status: Ended Time: 2017-04-06 21:36:55.492
-Task: LOADING DATA Status: Ended Elapsed Time: 00:00:01.117
-Task: COMPLETION Status: Started Time: 2017-04-06 21:36:55.492
+Task: LOADING DATA Status: Ended Time: 2017-05-01 17:54:57.861
+Task: LOADING DATA Status: Ended Elapsed Time: 00:00:00.871
+Task: COMPLETION Status: Started Time: 2017-05-01 17:54:57.861
Rows Loaded: 3
-Task: COMPLETION Status: Ended Time: 2017-04-06 21:36:55.875
-Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.383
+Task: COMPLETION Status: Ended Time: 2017-05-01 17:54:58.546
+Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.685
--- 3 row(s) loaded.
>>select count(*) from trafodion.seabase.traf_tbl_bad;
@@ -697,10 +697,10 @@ Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.383
--- 6 row(s) deleted.
>>load with stop after 3 error rows into trafodion.seabase.traf_tbl_bad select * from tbl_bad;
Task: LOAD Status: Started Object: TRAFODION.SEABASE.TRAF_TBL_BAD
-Task: CLEANUP Status: Started Time: 2017-04-06 21:36:56.927
-Task: CLEANUP Status: Ended Time: 2017-04-06 21:36:56.945
-Task: CLEANUP Status: Ended Elapsed Time: 00:00:00.018
-Task: LOADING DATA Status: Started Time: 2017-04-06 21:36:56.945
+Task: CLEANUP Status: Started Time: 2017-05-01 17:54:59.692
+Task: CLEANUP Status: Ended Time: 2017-05-01 17:54:59.705
+Task: CLEANUP Status: Ended Elapsed Time: 00:00:00.013
+Task: LOADING DATA Status: Started Time: 2017-05-01 17:54:59.705
*** ERROR[8113] The maximum number of error rows is exceeded.
@@ -715,11 +715,11 @@ Task: LOADING DATA Status: Started Time: 2017-04-06 21:36:56.945
--- 1 row(s) selected.
>>load with log error rows, stop after 3 error rows into trafodion.seabase.traf_tbl_bad select * from tbl_bad;
Task: LOAD Status: Started Object: TRAFODION.SEABASE.TRAF_TBL_BAD
-Task: CLEANUP Status: Started Time: 2017-04-06 21:36:58.356
-Task: CLEANUP Status: Ended Time: 2017-04-06 21:36:58.374
-Task: CLEANUP Status: Ended Elapsed Time: 00:00:00.017
- Logging Location: /bulkload/logs/ERR_TRAFODION.SEABASE.TRAF_TBL_BAD_20170406_213658
-Task: LOADING DATA Status: Started Time: 2017-04-06 21:36:58.374
+Task: CLEANUP Status: Started Time: 2017-05-01 17:55:01.163
+Task: CLEANUP Status: Ended Time: 2017-05-01 17:55:01.174
+Task: CLEANUP Status: Ended Elapsed Time: 00:00:00.011
+ Logging Location: /user/trafodion/bulkload/logs/ERR_TRAFODION.SEABASE.TRAF_TBL_BAD_20170501_175501
+Task: LOADING DATA Status: Started Time: 2017-05-01 17:55:01.174
*** ERROR[8113] The maximum number of error rows is exceeded.
@@ -807,7 +807,7 @@ Task: LOADING DATA Status: Started Time: 2017-04-06 21:36:58.374
*** WARNING[8597] Statement was automatically retried 1 time(s). Delay before each retry was 0 seconds. See next entry for the error that caused this retry.
-*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1491514634, failedModTS = 1491514650, failedLoc = hdfs://localhost:30200/user/hive/warehouse/thive
+*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1493661327, failedModTS = 1493661347, failedLoc = hdfs://localhost:25600/user/hive/warehouse/thive
A
-----------
@@ -842,7 +842,7 @@ A
*** WARNING[8597] Statement was automatically retried 1 time(s). Delay before each retry was 0 seconds. See next entry for the error that caused this retry.
-*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1491514654, failedModTS = 1491514672, failedLoc = hdfs://localhost:30200/user/hive/warehouse/thive
+*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1493661349, failedModTS = 1493661370, failedLoc = hdfs://localhost:25600/user/hive/warehouse/thive
A
-----------
@@ -869,7 +869,7 @@ A B
*** WARNING[8597] Statement was automatically retried 1 time(s). Delay before each retry was 0 seconds. See next entry for the error that caused this retry.
-*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1491514672, failedModTS = 1491514677, failedLoc = hdfs://localhost:30200/user/hive/warehouse/thive
+*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1493661370, failedModTS = 1493661376, failedLoc = hdfs://localhost:25600/user/hive/warehouse/thive
--- 0 row(s) selected.
>>insert into hive.hive.thive values (10, 20);
@@ -893,7 +893,7 @@ A B
*** WARNING[8597] Statement was automatically retried 1 time(s). Delay before each retry was 0 seconds. See next entry for the error that caused this retry.
-*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1491514679, failedModTS = 1491514680, failedLoc = hdfs://localhost:30200/user/hive/warehouse/thive
+*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1493661378, failedModTS = 1493661381, failedLoc = hdfs://localhost:25600/user/hive/warehouse/thive
--- 0 row(s) selected.
>>
@@ -985,7 +985,7 @@ t005part.a t005part.b t005part.c
>>invoke hive.hive.thive_insert_smallint;
-- Definition of hive table THIVE_INSERT_SMALLINT
--- Definition current Thu Apr 6 21:40:46 2017
+-- Definition current Mon May 1 17:59:39 2017
(
A SMALLINT
@@ -1090,7 +1090,7 @@ A
>>invoke hive.hive.thive_insert_varchar;
-- Definition of hive table THIVE_INSERT_VARCHAR
--- Definition current Thu Apr 6 21:40:54 2017
+-- Definition current Mon May 1 17:59:52 2017
(
A VARCHAR(1 CHAR) CHARACTER SET UTF8 COLLATE
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/f094aa0d/core/sql/regress/hive/EXPECTED009
----------------------------------------------------------------------
diff --git a/core/sql/regress/hive/EXPECTED009 b/core/sql/regress/hive/EXPECTED009
index b0066a1..b56099f 100644
--- a/core/sql/regress/hive/EXPECTED009
+++ b/core/sql/regress/hive/EXPECTED009
@@ -264,7 +264,7 @@ A B C
>>-- this causes the external table to be invalid
>>
>>-- cleanup data from the old table, and create/load data with additional column
->>sh regrhadoop.ksh fs -rm /user/hive/exttables/t009t1/*;
+>>sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/t009t1/*;
>>sh regrhive.ksh -v -f $REGRTSTDIR/TEST009_b.hive.sql &> $REGRRUNDIR/LOG009_b.hive.log;
>>
>>-- should fail - column mismatch
@@ -349,7 +349,7 @@ T009T2
>>invoke hive.sch_t009.t009t1;
-- Definition of hive table T009T1
--- Definition current Fri Mar 17 07:19:26 2017
+-- Definition current Mon May 1 18:17:29 2017
(
A INT
@@ -419,7 +419,7 @@ _HB__CELL__
>>invoke trafodion."_HB__ROW__"."baseball";
-- Definition of Trafodion external table TRAFODION."_HB__ROW__"."baseball"
--- Definition current Fri Mar 17 07:19:38 2017
+-- Definition current Mon May 1 18:17:41 2017
(
ROW_ID VARCHAR(100) CHARACTER SET ISO88591
@@ -432,7 +432,7 @@ _HB__CELL__
>>invoke trafodion."_HB__CELL__"."baseball";
-- Definition of Trafodion external table TRAFODION."_HB__CELL__"."baseball"
--- Definition current Fri Mar 17 07:19:39 2017
+-- Definition current Mon May 1 18:17:42 2017
(
ROW_ID VARCHAR(100) CHARACTER SET ISO88591
@@ -521,7 +521,7 @@ ROW_ID COLS
>>invoke hive.hive.store_sales;
-- Definition of hive table STORE_SALES
--- Definition current Fri Mar 17 07:19:57 2017
+-- Definition current Mon May 1 18:18:05 2017
(
SS_SOLD_DATE_SK INT
@@ -565,7 +565,7 @@ ROW_ID COLS
------------------------------------------------------------------ PLAN SUMMARY
MODULE_NAME .............. DYNAMICALLY COMPILED
STATEMENT_NAME ........... S
-PLAN_ID .................. 212356495197335919
+PLAN_ID .................. 212360422685553602
ROWS_OUT ............. 1,709
EST_TOTAL_COST ........... 0.27
STATEMENT ................ select * from store_sales where ss_item_sk = 1;
@@ -694,7 +694,7 @@ LC RC OP OPERATOR OPT DESCRIPTION CARD
------------------------------------------------------------------ PLAN SUMMARY
MODULE_NAME .............. DYNAMICALLY COMPILED
STATEMENT_NAME ........... S
-PLAN_ID .................. 212356495197600410
+PLAN_ID .................. 212360422686017142
ROWS_OUT ......... 2,920,643
EST_TOTAL_COST ........... 1.07
STATEMENT ................ select *
@@ -896,7 +896,7 @@ DESCRIPTION
>>invoke hive.hive.date_dim;
-- Definition of hive table DATE_DIM
--- Definition current Fri Mar 17 07:20:06 2017
+-- Definition current Mon May 1 18:18:15 2017
(
D_DATE_SK INT
@@ -1039,7 +1039,7 @@ CREATE EXTERNAL TABLE DATE_DIM
------------------------------------------------------------------ PLAN SUMMARY
MODULE_NAME .............. DYNAMICALLY COMPILED
STATEMENT_NAME ........... S
-PLAN_ID .................. 212356495214064420
+PLAN_ID .................. 212360422699701203
ROWS_OUT ................. 1
EST_TOTAL_COST ........... 0.01
STATEMENT ................ select *
@@ -1130,7 +1130,7 @@ DESCRIPTION
>>invoke hive.hive.date_dim;
-- Definition of hive table DATE_DIM
--- Definition current Fri Mar 17 07:20:22 2017
+-- Definition current Mon May 1 18:18:30 2017
(
D_DATE_SK INT
@@ -1273,7 +1273,7 @@ CREATE EXTERNAL TABLE DATE_DIM
------------------------------------------------------------------ PLAN SUMMARY
MODULE_NAME .............. DYNAMICALLY COMPILED
STATEMENT_NAME ........... S
-PLAN_ID .................. 212356495226927260
+PLAN_ID .................. 212360422714471702
ROWS_OUT ................. 1
EST_TOTAL_COST ........... 0.01
STATEMENT ................ select *
[5/8] incubator-trafodion git commit: Expected file change for TEST130
Posted by sa...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/a14a3ee1/core/sql/regress/executor/TEST130
----------------------------------------------------------------------
diff --git a/core/sql/regress/executor/TEST130 b/core/sql/regress/executor/TEST130
index 6f174a3..b9c3e2c 100755
--- a/core/sql/regress/executor/TEST130
+++ b/core/sql/regress/executor/TEST130
@@ -117,7 +117,7 @@ delete from t130lob2;
--test insert select from a source hive varchar column to lob
delete from t130lob2;
insert into t130lob2 select [first 10] d_date_sk,d_date_id from hive.hive.date_dim;
-select lobtostring(c2,40) from t130lob2 order by c1;
+select lobtostring(c2,4) from t130lob2 order by c1;
delete from t130lob2;
[4/8] incubator-trafodion git commit: Merge remote branch
'origin/master' into hdfs_chdir_work
Posted by sa...@apache.org.
Merge remote branch 'origin/master' into hdfs_chdir_work
Conflicts:
core/sql/regress/executor/TEST130
Project: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/commit/4b27589a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/tree/4b27589a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-trafodion/diff/4b27589a
Branch: refs/heads/master
Commit: 4b27589acaf0ab0861f3a892735ea2ce9b165d27
Parents: f094aa0 d0c4d73
Author: Sandhya Sundaresan <sa...@edev08.esgyn.local>
Authored: Tue May 2 17:29:42 2017 +0000
Committer: Sandhya Sundaresan <sa...@edev08.esgyn.local>
Committed: Tue May 2 17:29:42 2017 +0000
----------------------------------------------------------------------
core/sql/arkcmp/CmpStatement.cpp | 23 +-
core/sql/bin/clitest.cpp | 2 +-
core/sql/cli/Statement.cpp | 3 +
core/sql/cli/sqlcli.h | 5 +-
core/sql/clitest/blobtest.cpp | 4 +-
core/sql/comexe/CmpMessage.h | 10 +-
core/sql/common/ComSmallDefs.h | 1 +
core/sql/executor/ExExeUtilCli.cpp | 25 +-
core/sql/executor/ExExeUtilCli.h | 3 +-
core/sql/executor/ExExeUtilExplain.cpp | 20 +-
core/sql/executor/ExExeUtilLoad.cpp | 2 +-
core/sql/exp/ExpLOB.cpp | 103 +++---
core/sql/exp/ExpLOB.h | 21 +-
core/sql/exp/ExpLOBaccess.cpp | 213 +++++++++++--
core/sql/exp/ExpLOBaccess.h | 22 +-
core/sql/exp/ExpLOBenums.h | 17 +-
core/sql/exp/ExpLOBinterface.cpp | 75 +++--
core/sql/exp/ExpLOBinterface.h | 34 +-
core/sql/generator/GenItemFunc.cpp | 7 +-
core/sql/generator/GenPreCode.cpp | 52 ++-
core/sql/optimizer/ItemFunc.h | 2 +-
core/sql/parser/SqlParserGlobalsCmn.h | 84 +----
core/sql/parser/SqlParserGlobalsEnum.h | 119 +++++++
core/sql/regress/executor/EXPECTED130 | 469 +++++++++++-----------------
core/sql/regress/executor/TEST130 | 39 ++-
core/sql/regress/privs1/EXPECTED120 | 16 +
core/sql/regress/privs1/TEST120 | 3 +
core/sql/sqlcomp/CmpMain.cpp | 10 +-
core/sql/sqlcomp/CmpMain.h | 3 +-
core/sql/sqlcomp/nadefaults.cpp | 2 +-
licenses/lic-pyinstall-src | 4 +-
31 files changed, 825 insertions(+), 568 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/4b27589a/core/sql/regress/executor/TEST130
----------------------------------------------------------------------
diff --cc core/sql/regress/executor/TEST130
index bdcd6a1,b98f428..6f174a3
--- a/core/sql/regress/executor/TEST130
+++ b/core/sql/regress/executor/TEST130
@@@ -524,22 -548,24 +548,26 @@@ select * from table(lob stats(tlob130gt
?section lob_external
--test external lobs
create table tlob130ext (c1 int not null, c2 blob, c3 clob, c4 blob storage 'external', primary key (c1));
++
+ create table tlob130ext2 (c1 int not null, c2 blob, c3 clob, c4 blob storage 'external', primary key (c1));
+ create table tlob130_not_external (c1 int not null , c2 blob, c3 blob, c4 blob, primary key (c1));
-insert into tlob130ext values(1, stringtolob('first lob'), filetolob('hdfs:///lobs/lob_input_a1.txt'),externaltolob('hdfs:///lobs/lob_input_a1.txt'));
-insert into tlob130ext values(2, stringtolob('second lob'), filetolob('hdfs:///lobs/lob_input_b1.txt'),externaltolob('hdfs:///lobs/lob_input_b1.txt'));
-insert into tlob130ext values(3, stringtolob('third lob'), filetolob('hdfs:///lobs/lob_input_c1.txt'),externaltolob('hdfs:///lobs/lob_input_c1.txt'));
+insert into tlob130ext values(1, stringtolob('first lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'));
+insert into tlob130ext values(2, stringtolob('second lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_b1.txt'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_b1.txt'));
+insert into tlob130ext values(3, stringtolob('third lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_c1.txt'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_c1.txt'));
++
--negative cases
-insert into tlob130ext values(2, externaltolob('first lob'), filetolob('hdfs:///lobs/lob_input_a1.txt'),externaltolob('hdfs:///lobs/lob_input_a1.txt'));
-insert into tlob130ext values(3, stringtolob('first lob'), filetolob('hdfs:///lobs/lob_input_a1.txt'),filetolob('hdfs:///lobs/lob_input_a1.txt'));
-update tlob130ext set c4=stringtolob('hdfs:///lobs/lob_input_a1.txt', append) where c1=1;
-update tlob130ext set c4=externaltolob('hdfs:///lobs/lob_input_a1.txt', append) where c1=1;
-update tlob130ext set c3=externaltolob('hdfs:///lobs/lob_input_b1.txt') where c1=1;
+insert into tlob130ext values(2, externaltolob('first lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'));
+insert into tlob130ext values(3, stringtolob('first lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'),filetolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'));
+update tlob130ext set c4=stringtolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt', append) where c1=1;
+update tlob130ext set c4=externaltolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt', append) where c1=1;
+update tlob130ext set c3=externaltolob('hdfs:///user/trafodion/lobs/lob_input_b1.txt') where c1=1;
update tlob130ext set c4=stringtolob('cannot allow this') where c1=1;
delete from tlob130ext where c1=1;
-insert into tlob130ext values(1, stringtolob('first lob'),externaltolob('hdfs:///lobs/lob_input_a1.txt'),externaltolob('hdfs:///lobs/lob_input_a1.txt'));
-insert into tlob130ext values(1, stringtolob('first lob'), filetolob('hdfs:///lobs/lob_input_a1.txt'),externaltolob('hdfs:///lobs/lob_input_a1.txt'));
+insert into tlob130ext values(1, stringtolob('first lob'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'));
+insert into tlob130ext values(1, stringtolob('first lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'));
-- the next one should see error 8557
-insert into tlob130ext values(1, stringtolob('first lob'), filetolob('hdfs:///lobs/lob_input_a1.txt'),
-externaltolob('hdfs:///lobs/reallyLongDirectoryName0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789/lob_input_a1012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789.txt'));
+insert into tlob130ext values(1, stringtolob('first lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'),
+externaltolob('hdfs:///user/trafodion/lobs/reallyLongDirectoryName0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789/lob_input_a1012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789.txt'));
select lobtostring(c2,50),lobtostring(c3,50),lobtostring(c4,50) from tlob130ext;
update tlob130ext set c3=stringtolob('can allow this') where c1=1;
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/4b27589a/core/sql/sqlcomp/nadefaults.cpp
----------------------------------------------------------------------
diff --cc core/sql/sqlcomp/nadefaults.cpp
index 38b1feb,19fe1ac..0ab0f28
--- a/core/sql/sqlcomp/nadefaults.cpp
+++ b/core/sql/sqlcomp/nadefaults.cpp
@@@ -2090,10 -2090,10 +2090,10 @@@ SDDkwd__(ISO_MAPPING, (char *
// (unused)default size is 32000. Change this to extract more data into memory.
DDui___(LOB_OUTPUT_SIZE, "32000"),
- DD_____(LOB_STORAGE_FILE_DIR, "/lobs"),
+ DD_____(LOB_STORAGE_FILE_DIR, "/user/trafodion/lobs"),
// storage types defined in exp/ExpLOBenum.h.
- // Default is hdfs_file (value = 1)
+ // Default is hdfs_file (value = 2)
DDint__(LOB_STORAGE_TYPE, "2"),
//New default size for buffer size for local node
[2/8] incubator-trafodion git commit: Changes to move all Trafodion
created hdfs files under /user/trafodion
Posted by sa...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/f094aa0d/core/sql/regress/hive/EXPECTED018
----------------------------------------------------------------------
diff --git a/core/sql/regress/hive/EXPECTED018 b/core/sql/regress/hive/EXPECTED018
index e05f25a..b2ba64c 100644
--- a/core/sql/regress/hive/EXPECTED018
+++ b/core/sql/regress/hive/EXPECTED018
@@ -146,18 +146,18 @@
>>load with no recovery into customer_address
+>select * from hive.hive.customer_address;
Task: LOAD Status: Started Object: TRAFODION.HBASE.CUSTOMER_ADDRESS
-Task: CLEANUP Status: Started Time: 2017-04-06 22:12:34.718
-Task: CLEANUP Status: Ended Time: 2017-04-06 22:12:34.740
+Task: CLEANUP Status: Started Time: 2017-05-01 18:46:29.746
+Task: CLEANUP Status: Ended Time: 2017-05-01 18:46:29.768
Task: CLEANUP Status: Ended Elapsed Time: 00:00:00.022
-Task: LOADING DATA Status: Started Time: 2017-04-06 22:12:34.740
+Task: LOADING DATA Status: Started Time: 2017-05-01 18:46:29.768
Rows Processed: 50000
Error Rows: 0
-Task: LOADING DATA Status: Ended Time: 2017-04-06 22:12:44.470
-Task: LOADING DATA Status: Ended Elapsed Time: 00:00:09.306
-Task: COMPLETION Status: Started Time: 2017-04-06 22:12:44.471
+Task: LOADING DATA Status: Ended Time: 2017-05-01 18:46:39.501
+Task: LOADING DATA Status: Ended Elapsed Time: 00:00:09.236
+Task: COMPLETION Status: Started Time: 2017-05-01 18:46:39.509
Rows Loaded: 50000
-Task: COMPLETION Status: Ended Time: 2017-04-06 22:12:44.324
-Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.277
+Task: COMPLETION Status: Ended Time: 2017-05-01 18:46:39.346
+Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.341
--- 50000 row(s) loaded.
>>--
@@ -182,18 +182,18 @@ Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.277
>>load with no recovery into customer_demographics
+>select * from hive.hive.customer_demographics where cd_demo_sk <= 20000;
Task: LOAD Status: Started Object: TRAFODION.HBASE.CUSTOMER_DEMOGRAPHICS
-Task: CLEANUP Status: Started Time: 2017-04-06 22:12:49.421
-Task: CLEANUP Status: Ended Time: 2017-04-06 22:12:49.444
-Task: CLEANUP Status: Ended Elapsed Time: 00:00:00.023
-Task: LOADING DATA Status: Started Time: 2017-04-06 22:12:49.444
+Task: CLEANUP Status: Started Time: 2017-05-01 18:46:42.390
+Task: CLEANUP Status: Ended Time: 2017-05-01 18:46:42.404
+Task: CLEANUP Status: Ended Elapsed Time: 00:00:00.014
+Task: LOADING DATA Status: Started Time: 2017-05-01 18:46:42.404
Rows Processed: 20000
Error Rows: 0
-Task: LOADING DATA Status: Ended Time: 2017-04-06 22:13:02.917
-Task: LOADING DATA Status: Ended Elapsed Time: 00:00:12.647
-Task: COMPLETION Status: Started Time: 2017-04-06 22:13:02.920
+Task: LOADING DATA Status: Ended Time: 2017-05-01 18:46:55.199
+Task: LOADING DATA Status: Ended Elapsed Time: 00:00:12.795
+Task: COMPLETION Status: Started Time: 2017-05-01 18:46:55.199
Rows Loaded: 20000
-Task: COMPLETION Status: Ended Time: 2017-04-06 22:13:02.456
-Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.364
+Task: COMPLETION Status: Ended Time: 2017-05-01 18:46:55.606
+Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.407
--- 20000 row(s) loaded.
>>--
@@ -219,18 +219,18 @@ Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.364
>>load with no recovery into customer_demographics_salt
+>select * from hive.hive.customer_demographics where cd_demo_sk <= 20000;
Task: LOAD Status: Started Object: TRAFODION.HBASE.CUSTOMER_DEMOGRAPHICS_SALT
-Task: CLEANUP Status: Started Time: 2017-04-06 22:13:07.339
-Task: CLEANUP Status: Ended Time: 2017-04-06 22:13:07.489
-Task: CLEANUP Status: Ended Elapsed Time: 00:00:00.015
-Task: LOADING DATA Status: Started Time: 2017-04-06 22:13:07.489
+Task: CLEANUP Status: Started Time: 2017-05-01 18:47:00.293
+Task: CLEANUP Status: Ended Time: 2017-05-01 18:47:00.311
+Task: CLEANUP Status: Ended Elapsed Time: 00:00:00.018
+Task: LOADING DATA Status: Started Time: 2017-05-01 18:47:00.311
Rows Processed: 20000
Error Rows: 0
-Task: LOADING DATA Status: Ended Time: 2017-04-06 22:13:16.513
-Task: LOADING DATA Status: Ended Elapsed Time: 00:00:09.465
-Task: COMPLETION Status: Started Time: 2017-04-06 22:13:16.513
+Task: LOADING DATA Status: Ended Time: 2017-05-01 18:47:10.603
+Task: LOADING DATA Status: Ended Elapsed Time: 00:00:10.292
+Task: COMPLETION Status: Started Time: 2017-05-01 18:47:10.603
Rows Loaded: 20000
-Task: COMPLETION Status: Ended Time: 2017-04-06 22:13:16.803
-Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.290
+Task: COMPLETION Status: Ended Time: 2017-05-01 18:47:11.346
+Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.431
--- 20000 row(s) loaded.
>>--
@@ -246,18 +246,18 @@ Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.290
>>load with no recovery into customer_salt
+>select * from hive.hive.customer;
Task: LOAD Status: Started Object: TRAFODION.HBASE.CUSTOMER_SALT
-Task: CLEANUP Status: Started Time: 2017-04-06 22:13:19.244
-Task: CLEANUP Status: Ended Time: 2017-04-06 22:13:19.256
-Task: CLEANUP Status: Ended Elapsed Time: 00:00:00.012
-Task: LOADING DATA Status: Started Time: 2017-04-06 22:13:19.256
+Task: CLEANUP Status: Started Time: 2017-05-01 18:47:13.570
+Task: CLEANUP Status: Ended Time: 2017-05-01 18:47:13.584
+Task: CLEANUP Status: Ended Elapsed Time: 00:00:00.015
+Task: LOADING DATA Status: Started Time: 2017-05-01 18:47:13.584
Rows Processed: 100000
Error Rows: 0
-Task: LOADING DATA Status: Ended Time: 2017-04-06 22:13:31.650
-Task: LOADING DATA Status: Ended Elapsed Time: 00:00:11.808
-Task: COMPLETION Status: Started Time: 2017-04-06 22:13:31.651
+Task: LOADING DATA Status: Ended Time: 2017-05-01 18:47:25.306
+Task: LOADING DATA Status: Ended Elapsed Time: 00:00:11.722
+Task: COMPLETION Status: Started Time: 2017-05-01 18:47:25.306
Rows Loaded: 100000
-Task: COMPLETION Status: Ended Time: 2017-04-06 22:13:31.364
-Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.299
+Task: COMPLETION Status: Ended Time: 2017-05-01 18:47:25.645
+Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.339
--- 100000 row(s) loaded.
>>--
@@ -282,18 +282,18 @@ Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.299
>>load with no recovery into store_sales_salt
+>select * from hive.hive.store_sales where ss_item_sk <= 1000;
Task: LOAD Status: Started Object: TRAFODION.HBASE.STORE_SALES_SALT
-Task: CLEANUP Status: Started Time: 2017-04-06 22:13:35.969
-Task: CLEANUP Status: Ended Time: 2017-04-06 22:13:35.986
+Task: CLEANUP Status: Started Time: 2017-05-01 18:47:28.700
+Task: CLEANUP Status: Ended Time: 2017-05-01 18:47:28.717
Task: CLEANUP Status: Ended Elapsed Time: 00:00:00.017
-Task: LOADING DATA Status: Started Time: 2017-04-06 22:13:35.986
+Task: LOADING DATA Status: Started Time: 2017-05-01 18:47:28.717
Rows Processed: 160756
Error Rows: 0
-Task: LOADING DATA Status: Ended Time: 2017-04-06 22:13:50.490
-Task: LOADING DATA Status: Ended Elapsed Time: 00:00:14.504
-Task: COMPLETION Status: Started Time: 2017-04-06 22:13:50.490
+Task: LOADING DATA Status: Ended Time: 2017-05-01 18:47:42.429
+Task: LOADING DATA Status: Ended Elapsed Time: 00:00:13.711
+Task: COMPLETION Status: Started Time: 2017-05-01 18:47:42.429
Rows Loaded: 160756
-Task: COMPLETION Status: Ended Time: 2017-04-06 22:13:50.824
-Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.334
+Task: COMPLETION Status: Ended Time: 2017-05-01 18:47:42.753
+Task: COMPLETION Status: Ended Elapsed Time: 00:00:00.324
--- 160756 row(s) loaded.
>>--
@@ -377,16 +377,16 @@ a
>>
>>-- using unload
>>unload with purgedata from target
-+> into '/user/hive/exttables/null_format_default'
++> into '/user/trafodion/hive/exttables/null_format_default'
+> select * from null_format_src;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:14:00.918
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:14:00.164
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.007
-Task: EXTRACT Status: Started Time: 2017-04-06 22:14:00.164
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:47:51.294
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:47:51.296
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.002
+Task: EXTRACT Status: Started Time: 2017-05-01 18:47:51.296
Rows Processed: 10
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:14:00.442
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.426
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:47:51.439
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.142
--- 10 row(s) unloaded.
>>select * from hive.hive.null_format_default;
@@ -408,16 +408,16 @@ a
--- 10 row(s) selected.
>>
>>unload with purgedata from target
-+> into '/user/hive/exttables/null_format_empty'
++> into '/user/trafodion/hive/exttables/null_format_empty'
+> select * from null_format_src;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:14:01.304
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:14:01.308
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.004
-Task: EXTRACT Status: Started Time: 2017-04-06 22:14:01.308
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:47:52.194
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:47:52.195
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.002
+Task: EXTRACT Status: Started Time: 2017-05-01 18:47:52.195
Rows Processed: 10
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:14:01.529
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.221
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:47:52.297
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.102
--- 10 row(s) unloaded.
>>select * from hive.hive.null_format_empty;
@@ -439,16 +439,16 @@ a ?
--- 10 row(s) selected.
>>
>>unload with purgedata from target
-+> into '/user/hive/exttables/null_format_colon'
++> into '/user/trafodion/hive/exttables/null_format_colon'
+> select * from null_format_src;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:14:03.674
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:14:03.680
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.007
-Task: EXTRACT Status: Started Time: 2017-04-06 22:14:03.680
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:47:53.342
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:47:53.357
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.001
+Task: EXTRACT Status: Started Time: 2017-05-01 18:47:53.358
Rows Processed: 10
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:14:03.778
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.098
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:47:53.305
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.270
--- 10 row(s) unloaded.
>>select * from hive.hive.null_format_colon;
@@ -488,7 +488,7 @@ a
>>
>>--exp1
>>explain options 'f'
-+>UNLOAD EXTRACT TO '/bulkload/customer_address'
++>UNLOAD EXTRACT TO '/user/trafodion/bulkload/customer_address'
+>select * from trafodion.hbase.customer_address
+><<+ cardinality 10e10 >>;
@@ -506,24 +506,24 @@ LC RC OP OPERATOR OPT DESCRIPTION CARD
+> PURGEDATA FROM TARGET DELIMITER '|' RECORD_SEPARATOR '\n' NULL_STRING 'NULL'
+>MERGE FILE 'merged_customer_address.gz' OVERWRITE
+>COMPRESSION GZIP
-+>INTO '/bulkload/customer_address'
++>INTO '/user/trafodion/bulkload/customer_address'
+>select * from trafodion.hbase.customer_address
+>;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:14:57.526
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:14:57.532
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.006
-Task: EXTRACT Status: Started Time: 2017-04-06 22:14:57.532
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:49:01.614
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:49:01.615
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.001
+Task: EXTRACT Status: Started Time: 2017-05-01 18:49:01.615
Rows Processed: 50000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:14:58.628
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:01.096
-Task: MERGE FILES Status: Started Time: 2017-04-06 22:14:58.628
-Task: MERGE FILES Status: Ended Time: 2017-04-06 22:14:58.678
-Task: MERGE FILES Status: Ended Elapsed Time: 00:00:00.050
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:49:02.955
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:01.340
+Task: MERGE FILES Status: Started Time: 2017-05-01 18:49:02.955
+Task: MERGE FILES Status: Ended Time: 2017-05-01 18:49:02.995
+Task: MERGE FILES Status: Ended Elapsed Time: 00:00:00.040
--- 50000 row(s) unloaded.
>>log;
-regrhadoop.ksh fs -copyToLocal /bulkload/customer_address/merged_customer_address.gz /tmp
+regrhadoop.ksh fs -copyToLocal /user/trafodion/bulkload/customer_address/merged_customer_address.gz /tmp
gunzip -f /tmp/merged_customer_address.gz
cat /tmp/merged_customer_address | wc -l
50000
@@ -547,24 +547,24 @@ LC RC OP OPERATOR OPT DESCRIPTION CARD
+>WITH PURGEDATA FROM TARGET
+>MERGE FILE 'merged_customer_demogs.gz' OVERWRITE
+>COMPRESSION GZIP
-+>INTO '/bulkload/customer_demographics'
++>INTO '/user/trafodion/bulkload/customer_demographics'
+>select * from trafodion.hbase.customer_demographics
+><<+ cardinality 10e10 >>;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:15:02.643
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:15:02.651
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:49:06.594
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:49:06.602
Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.008
-Task: EXTRACT Status: Started Time: 2017-04-06 22:15:02.651
+Task: EXTRACT Status: Started Time: 2017-05-01 18:49:06.602
Rows Processed: 20000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:15:03.182
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.531
-Task: MERGE FILES Status: Started Time: 2017-04-06 22:15:03.182
-Task: MERGE FILES Status: Ended Time: 2017-04-06 22:15:03.208
-Task: MERGE FILES Status: Ended Elapsed Time: 00:00:00.026
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:49:07.141
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.539
+Task: MERGE FILES Status: Started Time: 2017-05-01 18:49:07.142
+Task: MERGE FILES Status: Ended Time: 2017-05-01 18:49:07.178
+Task: MERGE FILES Status: Ended Elapsed Time: 00:00:00.037
--- 20000 row(s) unloaded.
>>log;
-regrhadoop.ksh fs -copyToLocal /bulkload/customer_demographics/merged_customer_demogs.gz /tmp
+regrhadoop.ksh fs -copyToLocal /user/trafodion/bulkload/customer_demographics/merged_customer_demogs.gz /tmp
gunzip -f /tmp/merged_customer_demogs.gz
cat /tmp/merged_customer_demogs | wc -l
20000
@@ -574,29 +574,29 @@ cat /tmp/merged_customer_demogs | wc -l
+>WITH PURGEDATA FROM TARGET
+>MERGE FILE 'merged_customer_demogs_2' OVERWRITE
+>--COMPRESSION GZIP
-+>INTO '/bulkload/customer_demographics'
++>INTO '/user/trafodion/bulkload/customer_demographics'
+>select * from trafodion.hbase.customer_demographics
+><<+ cardinality 10e10 >>;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:15:06.683
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:15:06.695
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.012
-Task: EXTRACT Status: Started Time: 2017-04-06 22:15:06.695
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:49:10.514
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:49:10.521
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.007
+Task: EXTRACT Status: Started Time: 2017-05-01 18:49:10.521
Rows Processed: 20000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:15:07.116
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.316
-Task: MERGE FILES Status: Started Time: 2017-04-06 22:15:07.118
-Task: MERGE FILES Status: Ended Time: 2017-04-06 22:15:07.390
-Task: MERGE FILES Status: Ended Elapsed Time: 00:00:00.027
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:49:10.860
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.339
+Task: MERGE FILES Status: Started Time: 2017-05-01 18:49:10.860
+Task: MERGE FILES Status: Ended Time: 2017-05-01 18:49:10.903
+Task: MERGE FILES Status: Ended Elapsed Time: 00:00:00.043
--- 20000 row(s) unloaded.
>>log;
-regrhadoop.ksh fs -cat /bulkload/customer_demographics/merged_customer_demogs_2 | wc -l
+regrhadoop.ksh fs -cat /user/trafodion/bulkload/customer_demographics/merged_customer_demogs_2 | wc -l
20000
>>----------------------------------
>>--exp 3
>>explain options 'f'
-+>UNLOAD EXTRACT TO '/bulkload/customer_demographics_salt'
++>UNLOAD EXTRACT TO '/user/trafodion/bulkload/customer_demographics_salt'
+>select * from trafodion.hbase.customer_demographics_salt
+><<+ cardinality 10e10 >>;
@@ -618,52 +618,51 @@ LC RC OP OPERATOR OPT DESCRIPTION CARD
+>select * from trafodion.hbase.customer_demographics_salt
+><<+ cardinality 10e10 >>;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:15:12.414
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:15:12.419
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.005
-Task: EXTRACT Status: Started Time: 2017-04-06 22:15:12.419
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:49:16.748
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:49:16.761
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.001
+Task: EXTRACT Status: Started Time: 2017-05-01 18:49:16.761
Rows Processed: 20000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:15:13.186
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.767
-Task: MERGE FILES Status: Started Time: 2017-04-06 22:15:13.186
-Task: MERGE FILES Status: Ended Time: 2017-04-06 22:15:13.222
-Task: MERGE FILES Status: Ended Elapsed Time: 00:00:00.036
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:49:17.195
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.943
+Task: MERGE FILES Status: Started Time: 2017-05-01 18:49:17.196
+Task: MERGE FILES Status: Ended Time: 2017-05-01 18:49:17.724
+Task: MERGE FILES Status: Ended Elapsed Time: 00:00:00.053
--- 20000 row(s) unloaded.
>>
>>log;
-regrhadoop.ksh fs -du -s /bulkload/customer_demographics_salt/merged_customer_demogs_3
-778224 778224 /bulkload/customer_demographics_salt/merged_customer_demogs_3
+regrhadoop.ksh fs -du -s /user/trafodion/bulkload/customer_demographics_salt/merged_customer_demogs_3
>>-------------------
>>--unload 5
>>UNLOAD
+>WITH PURGEDATA FROM TARGET
+>MERGE FILE 'merged_customer_demogs_4.gz' OVERWRITE
+>COMPRESSION GZIP
-+>INTO '/bulkload/customer_demographics_salt'
++>INTO '/user/trafodion/bulkload/customer_demographics_salt'
+>select * from trafodion.hbase.customer_demographics_salt
+><<+ cardinality 10e10 >>;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:15:16.391
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:15:16.448
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.006
-Task: EXTRACT Status: Started Time: 2017-04-06 22:15:16.449
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:49:20.244
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:49:20.247
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.003
+Task: EXTRACT Status: Started Time: 2017-05-01 18:49:20.247
Rows Processed: 20000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:15:16.503
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.459
-Task: MERGE FILES Status: Started Time: 2017-04-06 22:15:16.503
-Task: MERGE FILES Status: Ended Time: 2017-04-06 22:15:16.551
-Task: MERGE FILES Status: Ended Elapsed Time: 00:00:00.047
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:49:20.801
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.554
+Task: MERGE FILES Status: Started Time: 2017-05-01 18:49:20.801
+Task: MERGE FILES Status: Ended Time: 2017-05-01 18:49:20.849
+Task: MERGE FILES Status: Ended Elapsed Time: 00:00:00.048
--- 20000 row(s) unloaded.
>>
>>log;
-regrhadoop.ksh fs -du -s /bulkload/customer_demographics_salt/merged_customer_demogs_4.gz
-78431 78431 /bulkload/customer_demographics_salt/merged_customer_demogs_4.gz
+regrhadoop.ksh fs -du -s /user/trafodion/bulkload/customer_demographics_salt/merged_customer_demogs_4.gz
+78431 78431 /user/trafodion/bulkload/customer_demographics_salt/merged_customer_demogs_4.gz
>>
>>--exp4
>>explain options 'f'
-+>UNLOAD EXTRACT TO '/bulkload/customer_demographics_salt'
++>UNLOAD EXTRACT TO '/user/trafodion/bulkload/customer_demographics_salt'
+>select * from trafodion.hbase.customer_demographics_salt
+><<+ cardinality 10e10 >>;
@@ -679,26 +678,26 @@ LC RC OP OPERATOR OPT DESCRIPTION CARD
>>--unload 6
>>UNLOAD
+>WITH PURGEDATA FROM TARGET
-+>--MERGE FILE '/bulkload/merged_customer_demogs_2.gz' OVERWRITE
++>--MERGE FILE '/user/trafodion/bulkload/merged_customer_demogs_2.gz' OVERWRITE
+>--COMPRESSION GZIP
-+>INTO '/bulkload/customer_demographics_salt'
++>INTO '/user/trafodion/bulkload/customer_demographics_salt'
+>select * from trafodion.hbase.customer_demographics_salt
+><<+ cardinality 10e10 >>;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:15:19.875
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:15:19.883
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.008
-Task: EXTRACT Status: Started Time: 2017-04-06 22:15:19.883
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:49:24.752
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:49:24.849
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.010
+Task: EXTRACT Status: Started Time: 2017-05-01 18:49:24.850
Rows Processed: 20000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:15:20.492
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.609
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:49:24.711
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.626
--- 20000 row(s) unloaded.
>>
>>log;
-regrhadoop.ksh fs -cat /bulkload/customer_demographics_salt/file* | wc -l
+regrhadoop.ksh fs -cat /user/trafodion/bulkload/customer_demographics_salt/file* | wc -l
20000
-regrhadoop.ksh fs -ls /bulkload/customer_demographics_salt/file* | grep file | wc -l
+regrhadoop.ksh fs -ls /user/trafodion/bulkload/customer_demographics_salt/file* | grep file | wc -l
4
>>
>>--unload 7
@@ -706,28 +705,28 @@ regrhadoop.ksh fs -ls /bulkload/customer_demographics_salt/file* | grep file |
+>WITH PURGEDATA FROM TARGET
+>MERGE FILE 'merged_customer_demogs_2.gz' OVERWRITE
+>COMPRESSION GZIP
-+>INTO '/bulkload/customer_demographics_salt'
++>INTO '/user/trafodion/bulkload/customer_demographics_salt'
+>select * from trafodion.hbase.customer_demographics_salt
+><<+ cardinality 10e10 >>;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:15:26.413
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:15:26.427
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.014
-Task: EXTRACT Status: Started Time: 2017-04-06 22:15:26.427
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:49:30.834
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:49:30.854
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.020
+Task: EXTRACT Status: Started Time: 2017-05-01 18:49:30.854
Rows Processed: 20000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:15:26.994
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.567
-Task: MERGE FILES Status: Started Time: 2017-04-06 22:15:26.994
-Task: MERGE FILES Status: Ended Time: 2017-04-06 22:15:27.511
-Task: MERGE FILES Status: Ended Elapsed Time: 00:00:00.056
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:49:31.428
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.574
+Task: MERGE FILES Status: Started Time: 2017-05-01 18:49:31.428
+Task: MERGE FILES Status: Ended Time: 2017-05-01 18:49:31.517
+Task: MERGE FILES Status: Ended Elapsed Time: 00:00:00.089
--- 20000 row(s) unloaded.
>>
->>sh regrhadoop.ksh fs -copyToLocal /bulkload/customer_demographics_salt/merged_customer_demogs_2.gz /tmp ;
+>>sh regrhadoop.ksh fs -copyToLocal /user/trafodion/bulkload/customer_demographics_salt/merged_customer_demogs_2.gz /tmp ;
>>sh gunzip -f /tmp/merged_customer_demogs_2.gz ;
->>sh regrhadoop.ksh fs -rm /user/hive/exttables/unload_customer_demographics/* ;
->>sh regrhadoop.ksh fs -copyFromLocal /tmp/merged_customer_demogs_2 /user/hive/exttables/unload_customer_demographics ;
->>sh rm /bulkload/merged_customer_demogs_2 ;
+>>sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/unload_customer_demographics/* ;
+>>sh regrhadoop.ksh fs -copyFromLocal /tmp/merged_customer_demogs_2 /user/trafodion/hive/exttables/unload_customer_demographics ;
+>>sh rm /user/trafodion/bulkload/merged_customer_demogs_2 ;
>>
>>cqd HIVE_MAX_STRING_LENGTH_IN_BYTES '100';
@@ -847,24 +846,24 @@ CD_DEMO_SK CD_GENDER
+>WITH PURGEDATA FROM TARGET
+>MERGE FILE 'merged_customer_demogs_4.gz' OVERWRITE
+>COMPRESSION GZIP
-+>INTO '/bulkload/customer_demographics_salt'
++>INTO '/user/trafodion/bulkload/customer_demographics_salt'
+>select * from trafodion.hbase.customer_demographics_salt
+><<+ cardinality 10e10 >>;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:15:36.975
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:15:36.980
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.005
-Task: EXTRACT Status: Started Time: 2017-04-06 22:15:36.980
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:49:42.894
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:49:42.904
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.010
+Task: EXTRACT Status: Started Time: 2017-05-01 18:49:42.904
Rows Processed: 20000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:15:37.782
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.802
-Task: MERGE FILES Status: Started Time: 2017-04-06 22:15:37.782
-Task: MERGE FILES Status: Ended Time: 2017-04-06 22:15:37.834
-Task: MERGE FILES Status: Ended Elapsed Time: 00:00:00.051
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:49:43.534
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.630
+Task: MERGE FILES Status: Started Time: 2017-05-01 18:49:43.534
+Task: MERGE FILES Status: Ended Time: 2017-05-01 18:49:43.583
+Task: MERGE FILES Status: Ended Elapsed Time: 00:00:00.049
--- 20000 row(s) unloaded.
>>log;
-regrhadoop.ksh fs -ls /bulkload/customer_demographics_salt/merged* | grep merge | wc -l
+regrhadoop.ksh fs -ls /user/trafodion/bulkload/customer_demographics_salt/merged* | grep merge | wc -l
1
>>
>>
@@ -874,7 +873,7 @@ regrhadoop.ksh fs -ls /bulkload/customer_demographics_salt/merged* | grep merge
+>WITH PURGEDATA FROM TARGET
+>MERGE FILE 'merged_customer_demogs_2' OVERWRITE
+>COMPRESSION GZIP
-+>INTO '/bulkload/customer_demographics_salt'
++>INTO '/user/trafodion/bulkload/customer_demographics_salt'
+>select * from trafodion.hbase.customer_demographics_salt
+><<+ cardinality 10e10 >>;
@@ -890,20 +889,20 @@ regrhadoop.ksh fs -ls /bulkload/customer_demographics_salt/merged* | grep merge
+>WITH PURGEDATA FROM TARGET
+>MERGE FILE 'merged_customer_demographics' OVERWRITE
+>--COMPRESSION GZIP
-+>INTO '/user/hive/exttables/unload_customer_demographics'
++>INTO '/user/trafodion/hive/exttables/unload_customer_demographics'
+>select * from trafodion.hbase.customer_demographics_salt
+><<+ cardinality 10e10 >>;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:15:41.344
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:15:41.352
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:49:46.855
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:49:46.863
Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.008
-Task: EXTRACT Status: Started Time: 2017-04-06 22:15:41.352
+Task: EXTRACT Status: Started Time: 2017-05-01 18:49:46.863
Rows Processed: 20000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:15:41.871
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.518
-Task: MERGE FILES Status: Started Time: 2017-04-06 22:15:41.871
-Task: MERGE FILES Status: Ended Time: 2017-04-06 22:15:41.922
-Task: MERGE FILES Status: Ended Elapsed Time: 00:00:00.051
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:49:47.716
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.852
+Task: MERGE FILES Status: Started Time: 2017-05-01 18:49:47.716
+Task: MERGE FILES Status: Ended Time: 2017-05-01 18:49:47.799
+Task: MERGE FILES Status: Ended Elapsed Time: 00:00:00.083
--- 20000 row(s) unloaded.
>>--sh sleep 10;
@@ -948,17 +947,17 @@ CD_DEMO_SK CD_GENDER
+>WITH PURGEDATA FROM TARGET
+>--MERGE FILE 'merged_customer_demographics' OVERWRITE
+>--COMPRESSION GZIP
-+>INTO '/user/hive/exttables/unload_customer_demographics'
++>INTO '/user/trafodion/hive/exttables/unload_customer_demographics'
+>select * from trafodion.hbase.customer_demographics_salt
+><<+ cardinality 10e10 >>;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:15:43.724
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:15:43.729
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.005
-Task: EXTRACT Status: Started Time: 2017-04-06 22:15:43.729
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:49:49.955
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:49:49.962
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.007
+Task: EXTRACT Status: Started Time: 2017-05-01 18:49:49.962
Rows Processed: 20000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:15:44.561
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.832
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:49:50.550
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.588
--- 20000 row(s) unloaded.
>>--sh sleep 10;
@@ -966,7 +965,7 @@ Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.832
*** WARNING[8597] Statement was automatically retried 1 time(s). Delay before each retry was 0 seconds. See next entry for the error that caused this retry.
-*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1491516941, failedModTS = 1491516943, failedLoc = hdfs://localhost:30200/user/hive/exttables/unload_customer_demographics
+*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1493664587, failedModTS = 1493664590, failedLoc = hdfs://localhost:25600/user/trafodion/hive/exttables/unload_customer_demographics
(EXPR)
--------------------
@@ -1005,16 +1004,16 @@ CD_DEMO_SK CD_GENDER
>>--unload 12
>>UNLOAD
+>WITH PURGEDATA FROM TARGET
-+>INTO '/user/hive/exttables/unload_customer_address'
++>INTO '/user/trafodion/hive/exttables/unload_customer_address'
+>select * from trafodion.hbase.customer_address ;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:15:47.646
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:15:47.101
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.037
-Task: EXTRACT Status: Started Time: 2017-04-06 22:15:47.101
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:49:53.855
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:49:53.858
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.003
+Task: EXTRACT Status: Started Time: 2017-05-01 18:49:53.858
Rows Processed: 50000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:15:47.960
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.859
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:49:54.791
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.933
--- 50000 row(s) unloaded.
>>--sh sleep 10;
@@ -1060,16 +1059,16 @@ CA_ADDRESS_SK CA_ADDRESS_ID
>>--test with numeric delimiers
>>UNLOAD
+>WITH PURGEDATA FROM TARGET DELIMITER 124 RECORD_SEPARATOR 10
-+>INTO '/user/hive/exttables/unload_customer_address'
++>INTO '/user/trafodion/hive/exttables/unload_customer_address'
+>select * from trafodion.hbase.customer_address ;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:15:50.334
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:15:50.339
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.005
-Task: EXTRACT Status: Started Time: 2017-04-06 22:15:50.339
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:49:57.416
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:49:57.421
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.006
+Task: EXTRACT Status: Started Time: 2017-05-01 18:49:57.422
Rows Processed: 50000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:15:51.398
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:01.059
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:49:58.307
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.885
--- 50000 row(s) unloaded.
>>--sh sleep 10;
@@ -1077,7 +1076,7 @@ Task: EXTRACT Status: Ended Elapsed Time: 00:00:01.059
*** WARNING[8597] Statement was automatically retried 1 time(s). Delay before each retry was 0 seconds. See next entry for the error that caused this retry.
-*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1491516947, failedModTS = 1491516950, failedLoc = hdfs://localhost:30200/user/hive/exttables/unload_customer_address
+*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1493664594, failedModTS = 1493664597, failedLoc = hdfs://localhost:25600/user/trafodion/hive/exttables/unload_customer_address
(EXPR)
--------------------
@@ -1128,16 +1127,16 @@ CA_ADDRESS_SK CA_ADDRESS_ID
+>WITH PURGEDATA FROM TARGET
+>--MERGE FILE 'merged_customer_demographics' OVERWRITE
+>--COMPRESSION GZIP
-+>INTO '/user/hive/exttables/unload_customer'
++>INTO '/user/trafodion/hive/exttables/unload_customer'
+>select * from trafodion.hbase.customer_salt;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:15:54.359
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:15:54.366
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.006
-Task: EXTRACT Status: Started Time: 2017-04-06 22:15:54.366
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:50:01.495
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:50:01.497
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.002
+Task: EXTRACT Status: Started Time: 2017-05-01 18:50:01.497
Rows Processed: 100000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:15:58.306
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:03.665
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:50:04.887
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:03.390
--- 100000 row(s) unloaded.
>>--sh sleep 10;
@@ -1184,16 +1183,16 @@ C_CUSTOMER_SK C_CUSTOMER_ID
+>WITH PURGEDATA FROM TARGET
+>--MERGE FILE 'merged_customer_demographics' OVERWRITE
+>--COMPRESSION GZIP
-+>INTO '/user/hive/exttables/unload_customer_demographics'
++>INTO '/user/trafodion/hive/exttables/unload_customer_demographics'
+>select * from trafodion.hbase.customer_demographics_salt;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:16:00.724
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:16:00.732
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.008
-Task: EXTRACT Status: Started Time: 2017-04-06 22:16:00.732
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:50:07.724
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:50:07.734
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.009
+Task: EXTRACT Status: Started Time: 2017-05-01 18:50:07.734
Rows Processed: 20000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:16:00.991
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.259
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:50:08.712
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.337
--- 20000 row(s) unloaded.
>>--sh sleep 10;
@@ -1239,19 +1238,19 @@ CD_DEMO_SK CD_GENDER
+> PURGEDATA FROM TARGET DELIMITER '|' RECORD_SEPARATOR '\n' NULL_STRING 'NULL'
+>MERGE FILE 'merged_customer_address.gz' OVERWRITE
+>COMPRESSION GZIP
-+>INTO '/bulkload/customer_address'
++>INTO '/user/trafodion/bulkload/customer_address'
+>select * from trafodion.hbase.customer_address where ca_address_sk < 100;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:16:02.675
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:16:02.679
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:50:10.860
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:50:10.909
Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.005
-Task: EXTRACT Status: Started Time: 2017-04-06 22:16:02.679
+Task: EXTRACT Status: Started Time: 2017-05-01 18:50:10.910
Rows Processed: 99
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:16:02.736
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.056
-Task: MERGE FILES Status: Started Time: 2017-04-06 22:16:02.736
-Task: MERGE FILES Status: Ended Time: 2017-04-06 22:16:02.759
-Task: MERGE FILES Status: Ended Elapsed Time: 00:00:00.023
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:50:10.159
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.068
+Task: MERGE FILES Status: Started Time: 2017-05-01 18:50:10.159
+Task: MERGE FILES Status: Ended Time: 2017-05-01 18:50:10.193
+Task: MERGE FILES Status: Ended Elapsed Time: 00:00:00.034
--- 99 row(s) unloaded.
>>
@@ -1261,16 +1260,16 @@ Task: MERGE FILES Status: Ended Elapsed Time: 00:00:00.023
+> PURGEDATA FROM TARGET DELIMITER '|' RECORD_SEPARATOR '\n' NULL_STRING 'NULL'
+>MERGE FILE 'merged_customer_address.gz'
+>COMPRESSION GZIP
-+>INTO '/bulkload/customer_address'
++>INTO '/user/trafodion/bulkload/customer_address'
+>select * from trafodion.hbase.customer_address where ca_address_sk < 100;
-*** ERROR[8965] File /bulkload/customer_address/merged_customer_address.gz already exists.
+*** ERROR[8965] File /user/trafodion/bulkload/customer_address/merged_customer_address.gz already exists.
--- 0 row(s) unloaded.
>>
>>
>>log;
-regrhadoop.ksh fs -rm /user/hive/exttables/unload_customer_demographics/*
+regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/unload_customer_demographics/*
>>
>>
>>cqd HDFS_IO_BUFFERSIZE reset;
@@ -1283,16 +1282,16 @@ regrhadoop.ksh fs -rm /user/hive/exttables/unload_customer_demographics/*
>>UNLOAD
+>WITH
+> PURGEDATA FROM TARGET DELIMITER '|' RECORD_SEPARATOR '\n'
-+>INTO '/user/hive/exttables/unload_store_sales_summary'
++>INTO '/user/trafodion/hive/exttables/unload_store_sales_summary'
+>select ss_sold_date_sk,ss_store_sk, sum (ss_quantity) from store_sales_salt group by ss_sold_date_sk ,ss_store_sk;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:16:06.343
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:16:06.349
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.005
-Task: EXTRACT Status: Started Time: 2017-04-06 22:16:06.349
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:50:13.905
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:50:13.912
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.008
+Task: EXTRACT Status: Started Time: 2017-05-01 18:50:13.912
Rows Processed: 12349
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:16:10.661
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:04.312
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:50:18.119
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:04.207
--- 12349 row(s) unloaded.
>>--sh sleep 10;
@@ -1407,16 +1406,16 @@ SS_SOLD_DATE_SK SS_STORE_SK SS_QUANTITY
>>--unload 18
>>UNLOAD
+>WITH PURGEDATA FROM TARGET
-+>INTO '/user/hive/exttables/unload_customer_and_address'
++>INTO '/user/trafodion/hive/exttables/unload_customer_and_address'
+>select * from trafodion.hbase.customer_salt c join trafodion.hbase.customer_address ca on c.c_current_addr_sk = ca.ca_address_sk ;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:16:12.575
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:16:12.581
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.005
-Task: EXTRACT Status: Started Time: 2017-04-06 22:16:12.581
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:50:20.184
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:50:20.186
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.003
+Task: EXTRACT Status: Started Time: 2017-05-01 18:50:20.186
Rows Processed: 100000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:16:15.847
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:03.267
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:50:23.138
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:02.952
--- 100000 row(s) unloaded.
>>--sh sleep 10;
@@ -1460,16 +1459,16 @@ C_CUSTOMER_SK C_CUSTOMER_ID
>>UNLOAD
+>WITH
+> PURGEDATA FROM TARGET
-+>INTO '/user/hive/exttables/unload_customer_address'
++>INTO '/user/trafodion/hive/exttables/unload_customer_address'
+>select * from customer_address where ca_address_sk < 1000 union select * from customer_address where ca_address_sk > 40000 and ca_address_sk < 41000;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:16:19.304
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:16:19.318
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.014
-Task: EXTRACT Status: Started Time: 2017-04-06 22:16:19.318
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:50:27.804
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:50:27.810
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.005
+Task: EXTRACT Status: Started Time: 2017-05-01 18:50:27.810
Rows Processed: 1998
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:16:19.610
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.292
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:50:28.224
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.415
--- 1998 row(s) unloaded.
>>--sh sleep 10;
@@ -1580,7 +1579,7 @@ ESP_EXCHANGE ============================== SEQ_NO 3 ONLY CHILD 2
use_snapshot_scan ...... TRUE
full_table_name ........ TRAFODION.HBASE.CUSTOMER_DEMOGRAPHICS_SALT
snapshot_name .......... TRAFODION.HBASE.CUSTOMER_DEMOGRAPHICS_SALT_SNAP111
- snapshot_temp_location /bulkload/20170406221624/
+ snapshot_temp_location /user/trafodion/bulkload/20170501185033/
grep -i -e 'explain reg' -e snapshot -e full_table_name -e esp_exchange LOG018_REGULAR_SCAN_PLAN.TXT | grep -v snapshot_scan_run_id
>>--no snapshot
>>explain reg;
@@ -1664,7 +1663,7 @@ grep -i -e 'explain snp' -e snapshot -e full_table_name -e esp_exchange LOG018_S
use_snapshot_scan ...... TRUE
full_table_name ........ TRAFODION.HBASE.CUSTOMER_ADDRESS
snapshot_name .......... TRAFODION.HBASE.CUSTOMER_ADDRESS_SNAP111
- snapshot_temp_location /bulkload/20170406221732/
+ snapshot_temp_location /user/trafodion/bulkload/20170501185041/
grep -i -e 'explain reg' -e snapshot -e full_table_name -e esp_exchange LOG018_REGULAR_SCAN_PLAN.TXT | grep -v snapshot_scan_run_id
>>--no snapshot
>>explain reg;
@@ -1759,12 +1758,12 @@ ESP_EXCHANGE ============================== SEQ_NO 6 ONLY CHILD 5
use_snapshot_scan ...... TRUE
full_table_name ........ TRAFODION.HBASE.CUSTOMER_SALT
snapshot_name .......... TRAFODION.HBASE.CUSTOMER_SALT_SNAP111
- snapshot_temp_location /bulkload/20170406221755/
+ snapshot_temp_location /user/trafodion/bulkload/20170501185103/
ESP_EXCHANGE ============================== SEQ_NO 2 ONLY CHILD 1
use_snapshot_scan ...... TRUE
full_table_name ........ TRAFODION.HBASE.CUSTOMER_ADDRESS
snapshot_name .......... TRAFODION.HBASE.CUSTOMER_ADDRESS_SNAP111
- snapshot_temp_location /bulkload/20170406221755/
+ snapshot_temp_location /user/trafodion/bulkload/20170501185103/
grep -i -e 'explain reg' -e snapshot -e full_table_name -e esp_exchange LOG018_REGULAR_SCAN_PLAN.TXT | grep -v snapshot_scan_run_id
>>--no snapshot
>>explain reg;
@@ -1860,7 +1859,7 @@ C_CUSTOMER_SK C_CUSTOMER_ID C_CURRENT_CDEMO_SK C_CURRENT_HDEMO_SK C_CURRE
--- SQL operation complete.
>> -- allow the extract syntax
>>explain options 'f'
-+>UNLOAD EXTRACT TO '/bulkload/customer_address'
++>UNLOAD EXTRACT TO '/user/trafodion/bulkload/customer_address'
+>select * from trafodion.hbase.customer_address <<+ cardinality 10e10 >>;
LC RC OP OPERATOR OPT DESCRIPTION CARD
@@ -1878,21 +1877,21 @@ LC RC OP OPERATOR OPT DESCRIPTION CARD
>>UNLOAD
+>WITH PURGEDATA FROM TARGET
+> EXISTING SNAPSHOT HAVING SUFFIX 'SNAP111'
-+>INTO '/user/hive/exttables/unload_customer_address'
++>INTO '/user/trafodion/hive/exttables/unload_customer_address'
+>select * from customer_address
+><<+ cardinality 10e10 >>;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:20:33.855
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:20:33.860
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.004
-Task: VERIFY SNAPSHO Status: Started Time: 2017-04-06 22:20:33.860
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:52:43.214
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:52:43.224
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.009
+Task: VERIFY SNAPSHO Status: Started Time: 2017-05-01 18:52:43.224
Snapshots verified: 1
-Task: VERIFY SNAPSHO Status: Ended Time: 2017-04-06 22:20:34.402
-Task: VERIFY SNAPSHO Status: Ended Elapsed Time: 00:00:00.543
-Task: EXTRACT Status: Started Time: 2017-04-06 22:20:34.403
+Task: VERIFY SNAPSHO Status: Ended Time: 2017-05-01 18:52:43.607
+Task: VERIFY SNAPSHO Status: Ended Elapsed Time: 00:00:00.383
+Task: EXTRACT Status: Started Time: 2017-05-01 18:52:43.607
Rows Processed: 50000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:20:35.493
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:01.091
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:52:44.813
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:01.206
--- 50000 row(s) unloaded.
>>
@@ -1941,7 +1940,7 @@ CA_ADDRESS_SK CA_ADDRESS_ID
--- SQL operation complete.
>> -- allow the extract syntax
>>explain options 'f'
-+>UNLOAD EXTRACT TO '/user/hive/exttables/unload_customer_demographics'
++>UNLOAD EXTRACT TO '/user/trafodion/hive/exttables/unload_customer_demographics'
+>select * from trafodion.hbase.customer_demographics_salt <<+ cardinality 10e10 >>;
LC RC OP OPERATOR OPT DESCRIPTION CARD
@@ -1960,20 +1959,20 @@ LC RC OP OPERATOR OPT DESCRIPTION CARD
>>UNLOAD
+>WITH PURGEDATA FROM TARGET
+> EXISTING SNAPSHOT HAVING SUFFIX 'SNAP111'
-+>INTO '/user/hive/exttables/unload_customer_demographics'
++>INTO '/user/trafodion/hive/exttables/unload_customer_demographics'
+>select * from trafodion.hbase.customer_demographics_salt <<+ cardinality 10e10 >>;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:20:39.602
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:20:39.628
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.003
-Task: VERIFY SNAPSHO Status: Started Time: 2017-04-06 22:20:39.629
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:52:48.844
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:52:48.846
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.002
+Task: VERIFY SNAPSHO Status: Started Time: 2017-05-01 18:52:48.846
Snapshots verified: 1
-Task: VERIFY SNAPSHO Status: Ended Time: 2017-04-06 22:20:39.506
-Task: VERIFY SNAPSHO Status: Ended Elapsed Time: 00:00:00.444
-Task: EXTRACT Status: Started Time: 2017-04-06 22:20:39.506
+Task: VERIFY SNAPSHO Status: Ended Time: 2017-05-01 18:52:49.204
+Task: VERIFY SNAPSHO Status: Ended Elapsed Time: 00:00:00.358
+Task: EXTRACT Status: Started Time: 2017-05-01 18:52:49.204
Rows Processed: 20000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:20:48.647
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:09.141
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:52:57.416
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:08.211
--- 20000 row(s) unloaded.
>>
@@ -2017,24 +2016,24 @@ CD_DEMO_SK CD_GENDER
>>UNLOAD
+>WITH PURGEDATA FROM TARGET
+> NEW SNAPSHOT HAVING SUFFIX 'SNAP112'
-+>INTO '/user/hive/exttables/unload_customer_demographics'
++>INTO '/user/trafodion/hive/exttables/unload_customer_demographics'
+>select * from trafodion.hbase.customer_demographics_salt <<+ cardinality 10e10 >>;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:20:50.443
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:20:50.452
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.008
-Task: CREATE SNAPSHO Status: Started Time: 2017-04-06 22:20:50.452
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:52:59.366
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:52:59.377
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.011
+Task: CREATE SNAPSHO Status: Started Time: 2017-05-01 18:52:59.377
Snapshots created: 1
-Task: CREATE SNAPSHO Status: Ended Time: 2017-04-06 22:20:51.116
-Task: CREATE SNAPSHO Status: Ended Elapsed Time: 00:00:00.664
-Task: EXTRACT Status: Started Time: 2017-04-06 22:20:51.116
+Task: CREATE SNAPSHO Status: Ended Time: 2017-05-01 18:53:00.390
+Task: CREATE SNAPSHO Status: Ended Elapsed Time: 00:00:01.013
+Task: EXTRACT Status: Started Time: 2017-05-01 18:53:00.393
Rows Processed: 20000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:20:52.248
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:01.132
-Task: DELETE SNAPSHO Status: Started Time: 2017-04-06 22:20:52.248
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:53:01.609
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:01.216
+Task: DELETE SNAPSHO Status: Started Time: 2017-05-01 18:53:01.609
Snapshots deleted: 1
-Task: DELETE SNAPSHO Status: Ended Time: 2017-04-06 22:20:52.262
-Task: DELETE SNAPSHO Status: Ended Elapsed Time: 00:00:00.013
+Task: DELETE SNAPSHO Status: Ended Time: 2017-05-01 18:53:01.627
+Task: DELETE SNAPSHO Status: Ended Elapsed Time: 00:00:00.018
--- 20000 row(s) unloaded.
>>
@@ -2042,7 +2041,7 @@ Task: DELETE SNAPSHO Status: Ended Elapsed Time: 00:00:00.013
*** WARNING[8597] Statement was automatically retried 1 time(s). Delay before each retry was 0 seconds. See next entry for the error that caused this retry.
-*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1491517248, failedModTS = 1491517251, failedLoc = hdfs://localhost:30200/user/hive/exttables/unload_customer_demographics
+*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1493664777, failedModTS = 1493664780, failedLoc = hdfs://localhost:25600/user/trafodion/hive/exttables/unload_customer_demographics
(EXPR)
--------------------
@@ -2082,24 +2081,24 @@ CD_DEMO_SK CD_GENDER
>>UNLOAD
+>WITH PURGEDATA FROM TARGET
+> NEW SNAPSHOT HAVING SUFFIX 'SNAP'
-+>INTO '/user/hive/exttables/unload_customer_demographics'
++>INTO '/user/trafodion/hive/exttables/unload_customer_demographics'
+>select * from trafodion.hbase.customer_demographics_salt <<+ cardinality 10e10 >>;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:20:54.745
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:20:54.756
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.011
-Task: CREATE SNAPSHO Status: Started Time: 2017-04-06 22:20:54.756
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:53:04.714
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:53:04.726
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.012
+Task: CREATE SNAPSHO Status: Started Time: 2017-05-01 18:53:04.726
Snapshots created: 1
-Task: CREATE SNAPSHO Status: Ended Time: 2017-04-06 22:20:56.343
-Task: CREATE SNAPSHO Status: Ended Elapsed Time: 00:00:01.587
-Task: EXTRACT Status: Started Time: 2017-04-06 22:20:56.343
+Task: CREATE SNAPSHO Status: Ended Time: 2017-05-01 18:53:06.337
+Task: CREATE SNAPSHO Status: Ended Elapsed Time: 00:00:01.611
+Task: EXTRACT Status: Started Time: 2017-05-01 18:53:06.337
Rows Processed: 20000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:20:57.250
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.907
-Task: DELETE SNAPSHO Status: Started Time: 2017-04-06 22:20:57.250
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:53:07.193
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.856
+Task: DELETE SNAPSHO Status: Started Time: 2017-05-01 18:53:07.193
Snapshots deleted: 1
-Task: DELETE SNAPSHO Status: Ended Time: 2017-04-06 22:20:57.256
-Task: DELETE SNAPSHO Status: Ended Elapsed Time: 00:00:00.006
+Task: DELETE SNAPSHO Status: Ended Time: 2017-05-01 18:53:07.204
+Task: DELETE SNAPSHO Status: Ended Elapsed Time: 00:00:00.011
--- 20000 row(s) unloaded.
>>
@@ -2115,7 +2114,7 @@ Task: DELETE SNAPSHO Status: Ended Elapsed Time: 00:00:00.006
*** WARNING[8597] Statement was automatically retried 1 time(s). Delay before each retry was 0 seconds. See next entry for the error that caused this retry.
-*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1491517252, failedModTS = 1491517256, failedLoc = hdfs://localhost:30200/user/hive/exttables/unload_customer_demographics
+*** WARNING[8436] Mismatch detected between compiletime and runtime hive table definitions. DataModMismatchDetails: compiledModTS = 1493664781, failedModTS = 1493664786, failedLoc = hdfs://localhost:25600/user/trafodion/hive/exttables/unload_customer_demographics
CD_DEMO_SK CD_GENDER CD_MARITAL_STATUS CD_EDUCATION_STATUS CD_PURCHASE_ESTIMATE CD_CREDIT_RATING CD_DEP_COUNT CD_DEP_EMPLOYED_COUNT CD_DEP_COLLEGE_COUNT
----------- ---------------------------------------------------------------------------------------------------- ---------------------------------------------------------------------------------------------------- ---------------------------------------------------------------------------------------------------- -------------------- ---------------------------------------------------------------------------------------------------- ------------ --------------------- --------------------
@@ -2148,24 +2147,24 @@ CD_DEMO_SK CD_GENDER
+>WITH
+> PURGEDATA FROM TARGET
+> NEW SNAPSHOT HAVING SUFFIX 'SNAP'
-+>INTO '/user/hive/exttables/unload_customer_address'
++>INTO '/user/trafodion/hive/exttables/unload_customer_address'
+>select * from customer_address where ca_address_sk < 1000 union select * from customer_address where ca_address_sk > 40000 and ca_address_sk < 41000;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:20:59.803
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:20:59.808
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:53:10.415
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:53:10.420
Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.005
-Task: CREATE SNAPSHO Status: Started Time: 2017-04-06 22:20:59.808
+Task: CREATE SNAPSHO Status: Started Time: 2017-05-01 18:53:10.420
Snapshots created: 1
-Task: CREATE SNAPSHO Status: Ended Time: 2017-04-06 22:21:01.106
-Task: CREATE SNAPSHO Status: Ended Elapsed Time: 00:00:01.298
-Task: EXTRACT Status: Started Time: 2017-04-06 22:21:01.106
+Task: CREATE SNAPSHO Status: Ended Time: 2017-05-01 18:53:11.649
+Task: CREATE SNAPSHO Status: Ended Elapsed Time: 00:00:01.228
+Task: EXTRACT Status: Started Time: 2017-05-01 18:53:11.649
Rows Processed: 1998
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:21:01.858
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.751
-Task: DELETE SNAPSHO Status: Started Time: 2017-04-06 22:21:01.858
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:53:12.226
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.577
+Task: DELETE SNAPSHO Status: Started Time: 2017-05-01 18:53:12.226
Snapshots deleted: 1
-Task: DELETE SNAPSHO Status: Ended Time: 2017-04-06 22:21:01.861
-Task: DELETE SNAPSHO Status: Ended Elapsed Time: 00:00:00.004
+Task: DELETE SNAPSHO Status: Ended Time: 2017-05-01 18:53:12.234
+Task: DELETE SNAPSHO Status: Ended Elapsed Time: 00:00:00.008
--- 1998 row(s) unloaded.
>>
@@ -2236,24 +2235,24 @@ CA_ADDRESS_SK CA_ADDRESS_ID
>>UNLOAD
+>WITH PURGEDATA FROM TARGET
+> NEW SNAPSHOT HAVING SUFFIX 'SNAP'
-+>INTO '/user/hive/exttables/unload_customer_and_address'
++>INTO '/user/trafodion/hive/exttables/unload_customer_and_address'
+>select * from trafodion.hbase.customer_salt c join trafodion.hbase.customer_address ca on c.c_current_addr_sk = ca.ca_address_sk ;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:21:03.814
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:21:03.821
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.008
-Task: CREATE SNAPSHO Status: Started Time: 2017-04-06 22:21:03.821
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:53:14.484
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:53:14.491
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.007
+Task: CREATE SNAPSHO Status: Started Time: 2017-05-01 18:53:14.491
Snapshots created: 2
-Task: CREATE SNAPSHO Status: Ended Time: 2017-04-06 22:21:06.616
-Task: CREATE SNAPSHO Status: Ended Elapsed Time: 00:00:02.795
-Task: EXTRACT Status: Started Time: 2017-04-06 22:21:06.616
+Task: CREATE SNAPSHO Status: Ended Time: 2017-05-01 18:53:17.824
+Task: CREATE SNAPSHO Status: Ended Elapsed Time: 00:00:03.333
+Task: EXTRACT Status: Started Time: 2017-05-01 18:53:17.824
Rows Processed: 100000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:21:10.370
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:03.754
-Task: DELETE SNAPSHO Status: Started Time: 2017-04-06 22:21:10.370
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:53:22.629
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:04.176
+Task: DELETE SNAPSHO Status: Started Time: 2017-05-01 18:53:22.736
Snapshots deleted: 2
-Task: DELETE SNAPSHO Status: Ended Time: 2017-04-06 22:21:10.388
-Task: DELETE SNAPSHO Status: Ended Elapsed Time: 00:00:00.017
+Task: DELETE SNAPSHO Status: Ended Time: 2017-05-01 18:53:22.197
+Task: DELETE SNAPSHO Status: Ended Elapsed Time: 00:00:00.019
--- 100000 row(s) unloaded.
>>--sh sleep 10;
@@ -2299,7 +2298,7 @@ C_CUSTOMER_SK C_CUSTOMER_ID
--- SQL operation complete.
>> -- allow the extract syntax
>>explain options 'f'
-+>UNLOAD EXTRACT TO '/bulkload/customer_name'
++>UNLOAD EXTRACT TO '/user/trafodion/bulkload/customer_name'
+>select c_first_name,c_last_name from trafodion.hbase.customer_salt;
LC RC OP OPERATOR OPT DESCRIPTION CARD
@@ -2317,24 +2316,24 @@ LC RC OP OPERATOR OPT DESCRIPTION CARD
>>UNLOAD
+>WITH PURGEDATA FROM TARGET
+> NEW SNAPSHOT HAVING SUFFIX 'SNAP111'
-+>INTO '/user/hive/exttables/unload_customer_name'
++>INTO '/user/trafodion/hive/exttables/unload_customer_name'
+>select c_first_name,c_last_name from trafodion.hbase.customer_salt;
Task: UNLOAD Status: Started
-Task: EMPTY TARGET Status: Started Time: 2017-04-06 22:21:13.583
-Task: EMPTY TARGET Status: Ended Time: 2017-04-06 22:21:13.603
-Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.020
-Task: CREATE SNAPSHO Status: Started Time: 2017-04-06 22:21:13.603
+Task: EMPTY TARGET Status: Started Time: 2017-05-01 18:53:25.364
+Task: EMPTY TARGET Status: Ended Time: 2017-05-01 18:53:25.366
+Task: EMPTY TARGET Status: Ended Elapsed Time: 00:00:00.002
+Task: CREATE SNAPSHO Status: Started Time: 2017-05-01 18:53:25.366
Snapshots created: 1
-Task: CREATE SNAPSHO Status: Ended Time: 2017-04-06 22:21:14.362
-Task: CREATE SNAPSHO Status: Ended Elapsed Time: 00:00:00.759
-Task: EXTRACT Status: Started Time: 2017-04-06 22:21:14.362
+Task: CREATE SNAPSHO Status: Ended Time: 2017-05-01 18:53:26.197
+Task: CREATE SNAPSHO Status: Ended Elapsed Time: 00:00:00.653
+Task: EXTRACT Status: Started Time: 2017-05-01 18:53:26.198
Rows Processed: 100000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:21:16.999
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:01.737
-Task: DELETE SNAPSHO Status: Started Time: 2017-04-06 22:21:16.100
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:53:27.567
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:01.548
+Task: DELETE SNAPSHO Status: Started Time: 2017-05-01 18:53:27.567
Snapshots deleted: 1
-Task: DELETE SNAPSHO Status: Ended Time: 2017-04-06 22:21:16.105
-Task: DELETE SNAPSHO Status: Ended Elapsed Time: 00:00:00.006
+Task: DELETE SNAPSHO Status: Ended Time: 2017-05-01 18:53:27.576
+Task: DELETE SNAPSHO Status: Ended Elapsed Time: 00:00:00.009
--- 100000 row(s) unloaded.
>>--sh sleep 10;
@@ -2378,31 +2377,31 @@ C_FIRST_NAME
>>--unload 100 --should give error [8447]
>>unload into '//\a//c' select * from CUSTOMER_ADDRESS;
Task: UNLOAD Status: Started
-Task: EXTRACT Status: Started Time: 2017-04-06 22:21:19.554
+Task: EXTRACT Status: Started Time: 2017-05-01 18:53:31.145
*** ERROR[8447] An error occurred during hdfs access. Error Detail: Java exception in hdfsCreate(). java.io.IOException: No FileSystem for scheme: null
-org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2584)
-org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2591)
-org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:91)
-org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2630)
-org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2612)
-org.apache.hadoop.fs.FileSystem.get(FileSystem.java:370)
+org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2676)
+org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2690)
+org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:94)
+org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2733)
+org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2715)
+org.apache.hadoop.fs.FileSystem.get(FileSystem.java:382)
org.trafodion.sql.SequenceFileWriter.hdfsCreate(SequenceFileWriter.java:155)
--- 0 row(s) unloaded.
>>
>>--unload 101 --should give syntax error
->>unload with delimiter 0 into '/bulkload/test' select * from CUSTOMER_ADDRESS;
+>>unload with delimiter 0 into '/user/trafodion/bulkload/test' select * from CUSTOMER_ADDRESS;
*** ERROR[15001] A syntax error occurred at or before:
-unload with delimiter 0 into '/bulkload/test' select * from CUSTOMER_ADDRESS;
+unload with delimiter 0 into '/user/trafodion/bulkload/test' select * from CUST
^ (23 characters from start of SQL statement)
*** ERROR[8822] The statement was not prepared.
>>
>>--unload 102 --should give an error
->>unload with MERGE FILE 'folder/cust_addr' into '/bulkload/test' select * from customer_address;
+>>unload with MERGE FILE 'folder/cust_addr' into '/user/trafodion/bulkload/test' select * from customer_address;
*** ERROR[4487] Invalid Path: MERGE FILE 'folder/cust_addr' cannot contain the '/' character.
@@ -2410,16 +2409,16 @@ unload with delimiter 0 into '/bulkload/test' select * from CUSTOMER_ADDRESS;
>>
>>--unload 103 -- should not give an error
->>unload with delimiter '\a' into '/bulkload/test' select * from customer_address;
+>>unload with delimiter '\a' into '/user/trafodion/bulkload/test' select * from customer_address;
Task: UNLOAD Status: Started
-Task: EXTRACT Status: Started Time: 2017-04-06 22:21:20.555
+Task: EXTRACT Status: Started Time: 2017-05-01 18:53:31.774
Rows Processed: 50000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:21:20.962
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.906
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:53:32.634
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.860
--- 50000 row(s) unloaded.
>>--unload 24 -- should give an error
->>unload with delimiter 'abca' into '/bulkload/test' select * from customer_address;
+>>unload with delimiter 'abca' into '/user/trafodion/bulkload/test' select * from customer_address;
*** ERROR[4379] Invalid BULK UNLOAD field delimiter or record separator. A valid field delimiter or record separator must be a single character or an integer between 1 and 255.
@@ -2427,25 +2426,26 @@ Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.906
>>
>>--unload 104 -- should give an error
->>unload with record_separator '\abca' into '/bulkload/test' select * from customer_address;
+>>unload with record_separator '\abca' into '/user/trafodion/bulkload/test' select * from customer_address;
*** ERROR[4374] Invalid escape sequence specified as BULK UNLOAD field delimiter or record separator. Only the following escape sequences are allowed: \a, \b, \f, \n, \r, \t, or \v.
*** ERROR[8822] The statement was not prepared.
>>--unload 105 -- should give an error
->>unload with record_separator '\z' into '/bulkload/test' select * from customer_address;
+>>unload with record_separator '\z' into '/user/trafodion/bulkload/test' select * from customer_address;
*** ERROR[4374] Invalid escape sequence specified as BULK UNLOAD field delimiter or record separator. Only the following escape sequences are allowed: \a, \b, \f, \n, \r, \t, or \v.
*** ERROR[8822] The statement was not prepared.
>>--unload 106 --should give error
->>unload into '/bulkload/test' select * from customer_address order by ca_address_id;
+>>unload into '/user/trafodion/bulkload/test' select * from customer_address order by ca_address_id;
*** ERROR[15001] A syntax error occurred at or before:
-unload into '/bulkload/test' select * from customer_address order by ca_addres
- ^ (66 characters from start of SQL statement)
+unload into '/user/trafodion/bulkload/test' select * from customer_address ord
+er by ca_address_id;
+ ^ (81 characters from start of SQL statement)
*** ERROR[8822] The statement was not prepared.
@@ -2453,11 +2453,12 @@ unload into '/bulkload/test' select * from customer_address order by ca_addres
>>cqd comp_bool_226 'on';
--- SQL operation complete.
->>unload extract to '/bulkload/test' select * from customer_address order by ca_address_id;
+>>unload extract to '/user/trafodion/bulkload/test' select * from customer_address order by ca_address_id;
*** ERROR[15001] A syntax error occurred at or before:
-unload extract to '/bulkload/test' select * from customer_address order by ca_
- ^ (72 characters from start of SQL statement)
+unload extract to '/user/trafodion/bulkload/test' select * from customer_addre
+ss order by ca_address_id;
+ ^ (87 characters from start of SQL statement)
*** ERROR[8822] The statement was not prepared.
@@ -2468,20 +2469,20 @@ unload extract to '/bulkload/test' select * from customer_address order by ca_
>>--unload 150
>>
>>log;
-regrhadoop.ksh fs -rm /user/hive/exttables/unload_customer_demographics/*
+regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/unload_customer_demographics/*
>>
>>CQD TRAF_UNLOAD_SKIP_WRITING_TO_FILES 'ON';
--- SQL operation complete.
>>UNLOAD
+>WITH PURGEDATA FROM TARGET
-+>INTO '/user/hive/exttables/unload_customer_demographics'
++>INTO '/user/trafodion/hive/exttables/unload_customer_demographics'
+>(select * from trafodion.hbase.customer_demographics_salt) ;
Task: UNLOAD Status: Started
-Task: EXTRACT Status: Started Time: 2017-04-06 22:21:24.484
+Task: EXTRACT Status: Started Time: 2017-05-01 18:53:36.154
Rows Processed but NOT Written to Disk: 20000
-Task: EXTRACT Status: Ended Time: 2017-04-06 22:21:24.849
-Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.365
+Task: EXTRACT Status: Ended Time: 2017-05-01 18:53:36.660
+Task: EXTRACT Status: Ended Elapsed Time: 00:00:00.506
--- 20000 row(s) unloaded.
>>--sh sleep 10;
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/f094aa0d/core/sql/regress/hive/FILTER005
----------------------------------------------------------------------
diff --git a/core/sql/regress/hive/FILTER005 b/core/sql/regress/hive/FILTER005
index dc462ef..b128157 100755
--- a/core/sql/regress/hive/FILTER005
+++ b/core/sql/regress/hive/FILTER005
@@ -28,7 +28,7 @@ if [ "$fil" = "" ]; then
fi
sed "
-s/Logging Location: \/bulkload\/logs\/ERR.*/Logging Location:@loggingLocation@/
-s/Logging Location: \/bulkload\/logs\/TEST005\/ERR.*/Logging Location:@TEST005LoggingLocation@/
+s/Logging Location: \/user\/trafodion\/bulkload\/logs\/ERR.*/Logging Location:@loggingLocation@/
+s/Logging Location: \/user\/trafodion\/bulkload\/logs\/TEST005\/ERR.*/Logging Location:@TEST005LoggingLocation@/
s/Time:.*/Time:@time@/
" $fil
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/f094aa0d/core/sql/regress/hive/TEST003
----------------------------------------------------------------------
diff --git a/core/sql/regress/hive/TEST003 b/core/sql/regress/hive/TEST003
index 78d74a9..a2144f5 100644
--- a/core/sql/regress/hive/TEST003
+++ b/core/sql/regress/hive/TEST003
@@ -23,31 +23,31 @@
-- @@@ END COPYRIGHT @@@
--if dirs exist below will fail
-sh regrhadoop.ksh fs -mkdir /user/hive/exttables/ins_customer;
-sh regrhadoop.ksh fs -mkdir /user/hive/exttables/ins_promotion;
-sh regrhadoop.ksh fs -mkdir /user/hive/exttables/ins_customer_address;
-sh regrhadoop.ksh fs -mkdir /user/hive/exttables/ins_store;
-sh regrhadoop.ksh fs -mkdir /user/hive/exttables/ins_store_sales;
-sh regrhadoop.ksh fs -mkdir /user/hive/exttables/ins_customer_demographics;
-sh regrhadoop.ksh fs -mkdir /user/hive/exttables/ins_date_dim;
-sh regrhadoop.ksh fs -mkdir /user/hive/exttables/ins_time_dim;
-sh regrhadoop.ksh fs -mkdir /user/hive/exttables/ins_item;
-sh regrhadoop.ksh fs -mkdir /user/hive/exttables/ins_household_demographics;
-sh regrhadoop.ksh fs -mkdir /user/hive/exttables/ins_customerNaddress;
-sh regrhadoop.ksh fs -mkdir /user/hive/exttables/ins_store_sales_summary;
+sh regrhadoop.ksh fs -mkdir /user/trafodion/hive/exttables/ins_customer;
+sh regrhadoop.ksh fs -mkdir /user/trafodion/hive/exttables/ins_promotion;
+sh regrhadoop.ksh fs -mkdir /user/trafodion/hive/exttables/ins_customer_address;
+sh regrhadoop.ksh fs -mkdir /user/trafodion/hive/exttables/ins_store;
+sh regrhadoop.ksh fs -mkdir /user/trafodion/hive/exttables/ins_store_sales;
+sh regrhadoop.ksh fs -mkdir /user/trafodion/hive/exttables/ins_customer_demographics;
+sh regrhadoop.ksh fs -mkdir /user/trafodion/hive/exttables/ins_date_dim;
+sh regrhadoop.ksh fs -mkdir /user/trafodion/hive/exttables/ins_time_dim;
+sh regrhadoop.ksh fs -mkdir /user/trafodion/hive/exttables/ins_item;
+sh regrhadoop.ksh fs -mkdir /user/trafodion/hive/exttables/ins_household_demographics;
+sh regrhadoop.ksh fs -mkdir /user/trafodion/hive/exttables/ins_customerNaddress;
+sh regrhadoop.ksh fs -mkdir /user/trafodion/hive/exttables/ins_store_sales_summary;
--empty folders
-sh regrhadoop.ksh fs -rm /user/hive/exttables/ins_customer/*;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/ins_promotion/*;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/ins_customer_address/*;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/ins_store/*;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/ins_store_sales/*;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/ins_customer_demographics/*;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/ins_date_dim/*;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/ins_time_dim/*;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/ins_item/*;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/ins_household_demographics/*;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/ins_customerNaddress/*;
-sh regrhadoop.ksh fs -rm /user/hive/exttables/ins_store_sales_summary/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/ins_customer/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/ins_promotion/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/ins_customer_address/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/ins_store/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/ins_store_sales/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/ins_customer_demographics/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/ins_date_dim/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/ins_time_dim/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/ins_item/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/ins_household_demographics/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/ins_customerNaddress/*;
+sh regrhadoop.ksh fs -rm /user/trafodion/hive/exttables/ins_store_sales_summary/*;
--- setup tables to insert into
@@ -176,9 +176,9 @@ control query shape cut;
select [first 12] 'test lp bug # 1355477' from ins_store_sales_summary;
-- hadoop ls should return 2 files
--- sh regrhadoop.ksh fs -ls /user/hive/exttables/ins_store_sales_summary/* | grep ins_store_sales_summary | wc -l | tee -a LOG003;
+-- sh regrhadoop.ksh fs -ls /user/trafodion/hive/exttables/ins_store_sales_summary/* | grep ins_store_sales_summary | wc -l | tee -a LOG003;
log;
-sh regrhadoop.ksh fs -ls /user/hive/exttables/ins_store_sales_summary/* | grep ins_store_sales_summary | wc -l | tee -a LOG003;
+sh regrhadoop.ksh fs -ls /user/trafodion/hive/exttables/ins_store_sales_summary/* | grep ins_store_sales_summary | wc -l | tee -a LOG003;
log LOG003_orig_store_sales_summary.dat clear;
select ss_sold_date_sk,ss_store_sk, sum (ss_quantity) from store_sales group by ss_sold_date_sk ,ss_store_sk order by ss_sold_date_sk,ss_store_sk;
log;
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/f094aa0d/core/sql/regress/hive/TEST003_create_hive_tables.hive
----------------------------------------------------------------------
diff --git a/core/sql/regress/hive/TEST003_create_hive_tables.hive b/core/sql/regress/hive/TEST003_create_hive_tables.hive
index a7edb76..aa2db88 100644
--- a/core/sql/regress/hive/TEST003_create_hive_tables.hive
+++ b/core/sql/regress/hive/TEST003_create_hive_tables.hive
@@ -42,7 +42,7 @@ create external table ins_customer
c_last_review_date string
)
row format delimited fields terminated by '|' LINES TERMINATED BY '\n'
-location '/user/hive/exttables/ins_customer';
+location '/user/trafodion/hive/exttables/ins_customer';
drop table ins_promotion;
@@ -69,7 +69,7 @@ create external table ins_promotion
p_discount_active string
)
row format delimited fields terminated by '|' LINES TERMINATED BY '\n'
-location '/user/hive/exttables/ins_promotion';
+location '/user/trafodion/hive/exttables/ins_promotion';
drop table ins_customer_address;
create external table ins_customer_address
@@ -89,7 +89,7 @@ create external table ins_customer_address
ca_location_type string
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/ins_customer_address';
+location '/user/trafodion/hive/exttables/ins_customer_address';
@@ -127,7 +127,7 @@ create external table ins_store
s_tax_precentage float
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/ins_store';
+location '/user/trafodion/hive/exttables/ins_store';
@@ -159,7 +159,7 @@ create external table ins_store_sales
ss_net_profit float
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/ins_store_sales';
+location '/user/trafodion/hive/exttables/ins_store_sales';
drop table ins_customer_demographics;
@@ -176,7 +176,7 @@ create external table ins_customer_demographics
cd_dep_college_count int
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/ins_customer_demographics';
+location '/user/trafodion/hive/exttables/ins_customer_demographics';
drop table ins_date_dim;
create external table ins_date_dim
@@ -211,7 +211,7 @@ create external table ins_date_dim
d_current_year string
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/ins_date_dim';
+location '/user/trafodion/hive/exttables/ins_date_dim';
drop table ins_time_dim;
create external table ins_time_dim
@@ -228,7 +228,7 @@ create external table ins_time_dim
t_meal_time string
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/ins_time_dim';
+location '/user/trafodion/hive/exttables/ins_time_dim';
drop table ins_item;
create external table ins_item
@@ -257,7 +257,7 @@ create external table ins_item
i_product_name string
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/ins_item';
+location '/user/trafodion/hive/exttables/ins_item';
drop table ins_household_demographics;
create external table ins_household_demographics
@@ -269,7 +269,7 @@ create external table ins_household_demographics
hd_vehicle_count int
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/ins_household_demographics';
+location '/user/trafodion/hive/exttables/ins_household_demographics';
drop table ins_customerNaddress;
create external table ins_customerNaddress
@@ -292,7 +292,7 @@ create external table ins_customerNaddress
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/ins_customerNaddress';
+location '/user/trafodion/hive/exttables/ins_customerNaddress';
drop table ins_store_sales_summary;
@@ -305,7 +305,7 @@ create external table ins_store_sales_summary
ss_quantity int
)
row format delimited fields terminated by '|'
-location '/user/hive/exttables/ins_store_sales_summary';
+location '/user/trafodion/hive/exttables/ins_store_sales_summary';
quit;
[6/8] incubator-trafodion git commit: Expected file change for TEST130
Posted by sa...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/a14a3ee1/core/sql/regress/executor/EXPECTED130
----------------------------------------------------------------------
diff --git a/core/sql/regress/executor/EXPECTED130 b/core/sql/regress/executor/EXPECTED130
index 0542d39..87a1d05 100644
--- a/core/sql/regress/executor/EXPECTED130
+++ b/core/sql/regress/executor/EXPECTED130
@@ -63,9 +63,9 @@ C1
C1 C2
----------- ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
----------------------------------------
- 1 LOBH000000020001001830865086431665681818308650874102010718212359906316509152020"TRAFODION"."LOB130"
- 2 LOBH000000020001001830865086431665681818308650874653085818212359906323581515020"TRAFODION"."LOB130"
- 3 LOBH000000020001001830865086431665681818308650875141948918212359906328520376020"TRAFODION"."LOB130"
+ 1 LOBH000000020001004521773054977751551845217730572979065218212360625565346135020"TRAFODION"."LOB130"
+ 2 LOBH000000020001004521773054977751551845217730575426024718212360625592037380020"TRAFODION"."LOB130"
+ 3 LOBH000000020001004521773054977751551845217730577394592318212360625613791173020"TRAFODION"."LOB130"
--- 3 row(s) selected.
>>
@@ -209,21 +209,21 @@ var char 33333333333333333333333333333333333333333333333333333
>>insert into t130lob2 select [first 10] d_date_sk,d_date_id from hive.hive.date_dim;
--- 10 row(s) inserted.
->>select lobtostring(c2,40) from t130lob2 order by c1;
+>>select lobtostring(c2,4) from t130lob2 order by c1;
-(EXPR)
-----------------------------------------
+(EXPR)
+------
-AAAAAAAAOKJNECAA
-AAAAAAAAPKJNECAA
-AAAAAAAAALJNECAA
-AAAAAAAABLJNECAA
-AAAAAAAACLJNECAA
-AAAAAAAADLJNECAA
-AAAAAAAAELJNECAA
-AAAAAAAAFLJNECAA
-AAAAAAAAGLJNECAA
-AAAAAAAAHLJNECAA
+AAAA
+AAAA
+AAAA
+AAAA
+AAAA
+AAAA
+AAAA
+AAAA
+AAAA
+AAAA
--- 10 row(s) selected.
>>delete from t130lob2;
@@ -654,7 +654,7 @@ And the dish ran away with the fork !
>>sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'tlob130_txt1.txt');/g" >> t130_extract_command;
>>
>>obey t130_extract_command;
->>extract lobtofile(LOB 'LOBH000000020001001830865086432349061818308650940724338718212359906984262764020"TRAFODION"."LOB130"
' , 'tlob130_txt1.txt');
+>>extract lobtofile(LOB 'LOBH000000020001004521773054979487561845217730741742859818212360627257261284020"TRAFODION"."LOB130"
' , 'tlob130_txt1.txt');
Success. Targetfile :tlob130_txt1.txt Length : 19
--- SQL operation complete.
@@ -670,7 +670,7 @@ Success. Targetfile :tlob130_txt1.txt Length : 19
>>sh rm t130_extract_command;
>>sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'tlob130_deep.jpg');/g" >> t130_extract_command;
>>obey t130_extract_command;
->>extract lobtofile(LOB 'LOBH000000020001001830865086432359101818308650946460854518212359907041579214020"TRAFODION"."LOB130"
' , 'tlob130_deep.jpg');
+>>extract lobtofile(LOB 'LOBH000000020001004521773054979514421845217730753331456518212360627372759041020"TRAFODION"."LOB130"
' , 'tlob130_deep.jpg');
Success. Targetfile :tlob130_deep.jpg Length : 159018
--- SQL operation complete.
@@ -686,7 +686,7 @@ Success. Targetfile :tlob130_deep.jpg Length : 159018
>>sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'tlob130_anoush.jpg');/g" >> t130_extract_command;
>>
>>obey t130_extract_command;
->>extract lobtofile(LOB 'LOBH000000020001001830865086432359101818308650946460854518212359907041579214020"TRAFODION"."LOB130"
' , 'tlob130_anoush.jpg');
+>>extract lobtofile(LOB 'LOBH000000020001004521773054979514421845217730753331456518212360627372759041020"TRAFODION"."LOB130"
' , 'tlob130_anoush.jpg');
Success. Targetfile :tlob130_anoush.jpg Length : 230150
--- SQL operation complete.
@@ -706,29 +706,29 @@ Success. Targetfile :tlob130_anoush.jpg Length : 230150
--- SQL operation complete.
>>
->>sh regrhadoop.ksh fs -copyFromLocal lob_input_a1.txt /lobs/lob_input_a1.txt;
->>sh regrhadoop.ksh fs -copyFromLocal lob_input_b1.txt /lobs/lob_input_b1.txt;
->>sh regrhadoop.ksh fs -copyFromLocal lob_input_c1.txt /lobs/lob_input_c1.txt;
->>sh regrhadoop.ksh fs -copyFromLocal lob_input_d1.txt /lobs/lob_input_d1.txt;
->>sh regrhadoop.ksh fs -copyFromLocal lob_input_e1.txt /lobs/lob_input_e1.txt;
->>sh regrhadoop.ksh fs -copyFromLocal deep.jpg /lobs/deep.jpg;
->>sh regrhadoop.ksh fs -copyFromLocal anoush.jpg /lobs/anoush.jpg;
+>>sh regrhadoop.ksh fs -copyFromLocal lob_input_a1.txt /user/trafodion/lobs/lob_input_a1.txt;
+>>sh regrhadoop.ksh fs -copyFromLocal lob_input_b1.txt /user/trafodion/lobs/lob_input_b1.txt;
+>>sh regrhadoop.ksh fs -copyFromLocal lob_input_c1.txt /user/trafodion/lobs/lob_input_c1.txt;
+>>sh regrhadoop.ksh fs -copyFromLocal lob_input_d1.txt /user/trafodion/lobs/lob_input_d1.txt;
+>>sh regrhadoop.ksh fs -copyFromLocal lob_input_e1.txt /user/trafodion/lobs/lob_input_e1.txt;
+>>sh regrhadoop.ksh fs -copyFromLocal deep.jpg /user/trafodion/lobs/deep.jpg;
+>>sh regrhadoop.ksh fs -copyFromLocal anoush.jpg /user/trafodion/lobs/anoush.jpg;
>>-- the next one is a really long file name intended to test error message 8557
->>sh regrhadoop.ksh fs -copyFromLocal lob_input_a1.txt /lobs/reallyLongDirectoryName0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789/lob_input_a1012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789.txt;
+>>sh regrhadoop.ksh fs -copyFromLocal lob_input_a1.txt /user/trafodion/lobs/reallyLongDirectoryName0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789/lob_input_a1012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789.txt;
>>sh sleep(20);
>>
>>
->>insert into tlob130txt2 values (1, filetolob('hdfs:///lobs/lob_input_a1.txt'));
+>>insert into tlob130txt2 values (1, filetolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'));
--- 1 row(s) inserted.
>>
>>-- second line
->>insert into tlob130txt2 values (2, filetolob('hdfs:///lobs/lob_input_b1.txt'));
+>>insert into tlob130txt2 values (2, filetolob('hdfs:///user/trafodion/lobs/lob_input_b1.txt'));
--- 1 row(s) inserted.
>>
>>-- third line
->>insert into tlob130txt2 values (3, filetolob('hdfs:///lobs/lob_input_c1.txt'));
+>>insert into tlob130txt2 values (3, filetolob('hdfs:///user/trafodion/lobs/lob_input_c1.txt'));
--- 1 row(s) inserted.
>>select lobtostring(c2, 40 ) from tlob130txt2;
@@ -748,7 +748,7 @@ The cow jumped over the moon.
>>--updates
>>
>>--should update with full poem
->>update tlob130txt2 set c2=filetolob('hdfs:///lobs/lob_input_d1.txt', append) where c1 = 3;
+>>update tlob130txt2 set c2=filetolob('hdfs:///user/trafodion/lobs/lob_input_d1.txt', append) where c1 = 3;
--- 1 row(s) updated.
>>select lobtostring(c2, 200 ) from tlob130txt2;
@@ -770,7 +770,7 @@ And the dish ran away with the spoon.
--- 3 row(s) selected.
>>
>>-- should see wrong text in the last few lines
->>update tlob130txt2 set c2=filetolob('hdfs:///lobs/lob_input_e1.txt') where c1 =3 ;
+>>update tlob130txt2 set c2=filetolob('hdfs:///user/trafodion/lobs/lob_input_e1.txt') where c1 =3 ;
--- 1 row(s) updated.
>>select lobtostring(c2, 200 ) from tlob130txt2;
@@ -805,13 +805,9 @@ And the dish ran away with the fork !
>>log;
>>sh rm t130_extract_command;
>>
->>sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'hdfs:\/\/\/lobs\/tlob130_txt2.txt');/g" >> t130_extract_command;
+>>sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'hdfs:\/\/\/user\/trafodion\/lobs\/tlob130_txt2.txt');/g" >> t130_extract_command;
>>
>>obey t130_extract_command;
->>extract lobtofile(LOB 'LOBH000000020001001830865086432481141818308650954225440318212359907119296896020"TRAFODION"."LOB130"
' , 'hdfs:///lobs/tlob130_txt2.txt');
-Success. Targetfile :hdfs:///lobs/tlob130_txt2.txt Length : 19
-
---- SQL operation complete.
>>
>>--binary input/update
>>
@@ -822,12 +818,8 @@ Success. Targetfile :hdfs:///lobs/tlob130_txt2.txt Length : 19
>>
>>log;
>>sh rm t130_extract_command;
->>sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'hdfs:\/\/\/lobs\/tlob130_deep.jpg');/g" >> t130_extract_command;
+>>sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'hdfs:\/\/\/user\/trafodion\/lobs\/tlob130_deep.jpg');/g" >> t130_extract_command;
>>obey t130_extract_command;
->>extract lobtofile(LOB 'LOBH000000020001001830865086432492771818308650960155056018212359907178472834020"TRAFODION"."LOB130"
' , 'hdfs:///lobs/tlob130_deep.jpg');
-Success. Targetfile :hdfs:///lobs/tlob130_deep.jpg Length : 159018
-
---- SQL operation complete.
>>
>>update tlob130bin2 set c2=filetolob('anoush.jpg') ;
@@ -837,13 +829,9 @@ Success. Targetfile :hdfs:///lobs/tlob130_deep.jpg Length : 159018
>>
>>log;
>>sh rm t130_extract_command;
->>sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'hdfs:\/\/\/lobs\/tlob130_anoush.jpg');/g" >> t130_extract_command;
+>>sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'hdfs:\/\/\/user\/trafodion\/lobs\/tlob130_anoush.jpg');/g" >> t130_extract_command;
>>
>>obey t130_extract_command;
->>extract lobtofile(LOB 'LOBH000000020001001830865086432359101818308650946460854518212359907041579214020"TRAFODION"."LOB130"
' , 'hdfs:///lobs/tlob130_anoush.jpg');
-Success. Targetfile :hdfs:///lobs/tlob130_anoush.jpg Length : 230150
-
---- SQL operation complete.
>>
>>
>>sh clitestdriver 2 < TEST130_argfile 2>&1 | tee -a LOG130;
@@ -859,7 +847,7 @@ Column Name : c2
Input a filename to extract to :
Output File Name : lobc2out.jpg
Extracting lob handle for column c2...
-LOB handle for c2: LOBH000000020001001830865086432359101818308650946460854518212359907041579214020"TRAFODION"."LOB130"
+LOB handle for c2: LOBH000000020001004521773054979514421845217730753331456518212360627372759041020"TRAFODION"."LOB130"
Extracting LOB data length for the above handle...
LOB data length :230150
Extracting lob data into file in chunks ...
@@ -933,19 +921,15 @@ And the dish ran away with the spoon.
>>sh rm t130_extract_command;
>>sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'tlob130_deep2.jpg');/g" >> t130_extract_command;
>>obey t130_extract_command;
->>extract lobtofile(LOB 'LOBH000000020002001830865086432700361818308650973720712718212359907314128843020"TRAFODION"."LOB130"
' , 'tlob130_deep2.jpg');
+>>extract lobtofile(LOB 'LOBH000000020002004521773054980105951845217730796607893818212360627806139125020"TRAFODION"."LOB130"
' , 'tlob130_deep2.jpg');
Success. Targetfile :tlob130_deep2.jpg Length : 159018
--- SQL operation complete.
>>
>>log;
>>sh rm t130_extract_command;
->>sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'hdfs:\/\/\/lobs\/tlob130_anoush2.jpg');/g" >> t130_extract_command;
+>>sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'hdfs:\/\/\/user\/trafodion\/lobs\/tlob130_anoush2.jpg');/g" >> t130_extract_command;
>>obey t130_extract_command;
->>extract lobtofile(LOB 'LOBH000000020003001830865086432700361818308650974204477218212359907318971086020"TRAFODION"."LOB130"
' , 'hdfs:///lobs/tlob130_anoush2.jpg');
-Success. Targetfile :hdfs:///lobs/tlob130_anoush2.jpg Length : 230150
-
---- SQL operation complete.
>>
>>-- combination blob and clob columns
>>create table tlob130bt (c1 int not null, c2 int, c3 blob, c4 clob, primary key (c1));
@@ -969,7 +953,7 @@ Hey diddle diddle,
>>sh rm t130_extract_command;
>>sh grep "^LOBH" TMP130 | sed "s/^/extract lobtofile(LOB '/g" | sed "s/$/' , 'tlob130_anoush3.jpg',create,truncate);/g" >> t130_extract_command;
>>obey t130_extract_command;
->>extract lobtofile(LOB 'LOBH000000020003001830865086432746671818308650978649997618212359907363638742020"TRAFODION"."LOB130"
' , 'tlob130_anoush3.jpg',create,truncate);
+>>extract lobtofile(LOB 'LOBH000000020003004521773054980180651845217730805462671518212360627894957464020"TRAFODION"."LOB130"
' , 'tlob130_anoush3.jpg',create,truncate);
Success. Targetfile :tlob130_anoush3.jpg Length : 230150
--- SQL operation complete.
@@ -1069,13 +1053,13 @@ Lob Information for table: "TRAFODION".LOB130.TLOB130GT2
=========================
ColumnName : C2
- Lob Location : /lobs
- LOB Data File: LOBP_00183086508643303323_0001
+ Lob Location : /user/trafodion/lobs
+ LOB Data File: LOBP_00452177305498076106_0001
LOB EOD : 0
LOB Used Len : 0
ColumnName : C3
- Lob Location : /lobs
- LOB Data File: LOBP_00183086508643303323_0002
+ Lob Location : /user/trafodion/lobs
+ LOB Data File: LOBP_00452177305498076106_0002
LOB EOD : 0
LOB Used Len : 0
ColumnName : C4
@@ -1090,8 +1074,8 @@ Lob Information for table: "TRAFODION".LOB130.TLOB130GT2
CATALOG_NAME SCHEMA_NAME OBJECT_NAME COLUMN_NAME
LOB_LOCATION LOB_DATA_FILE LOB_DATA_FILE_SIZE_EOD LOB_DATA_FILE_SIZE_USED
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
--------------------------------- ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- ---------------------- -----------------------
-TRAFODION LOB130 TLOB130GT2 C2
/lobs LOBP_00183086508643303323_0001 0 0
-TRAFODION LOB130 TLOB130GT2 C3
/lobs LOBP_00183086508643303323_0002 0 0
+TRAFODION LOB130 TLOB130GT2 C2
/user/trafodion/lobs LOBP_00452177305498076106_0001 0 0
+TRAFODION LOB130 TLOB130GT2 C3
/user/trafodion/lobs LOBP_00452177305498076106_0002 0 0
TRAFODION LOB130 TLOB130GT2 C4
External HDFS Location External HDFS File 0 0
--- 3 row(s) selected.
@@ -1111,18 +1095,18 @@ Lob Information for table: "TRAFODION".LOB130.TLOB130GT
=========================
ColumnName : C2
- Lob Location : /lobs
- LOB Data File: LOBP_00183086508643300979_0001
+ Lob Location : /user/trafodion/lobs
+ LOB Data File: LOBP_00452177305498070471_0001
LOB EOD : 15
LOB Used Len : 15
ColumnName : C3
- Lob Location : /lobs
- LOB Data File: LOBP_00183086508643300979_0002
+ Lob Location : /user/trafodion/lobs
+ LOB Data File: LOBP_00452177305498070471_0002
LOB EOD : 15
LOB Used Len : 15
ColumnName : C4
- Lob Location : /lobs
- LOB Data File: LOBP_00183086508643300979_0003
+ Lob Location : /user/trafodion/lobs
+ LOB Data File: LOBP_00452177305498070471_0003
LOB EOD : 45
LOB Used Len : 45
@@ -1132,9 +1116,9 @@ Lob Information for table: "TRAFODION".LOB130.TLOB130GT
CATALOG_NAME SCHEMA_NAME OBJECT_NAME COLUMN_NAME
LOB_LOCATION LOB_DATA_FILE LOB_DATA_FILE_SIZE_EOD LOB_DATA_FILE_SIZE_USED
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
--------------------------------- ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- ---------------------- -----------------------
-TRAFODION LOB130 TLOB130GT C2
/lobs LOBP_00183086508643300979_0001 15 15
-TRAFODION LOB130 TLOB130GT C3
/lobs LOBP_00183086508643300979_0002 15 15
-TRAFODION LOB130 TLOB130GT C4
/lobs LOBP_00183086508643300979_0003 45 45
+TRAFODION LOB130 TLOB130GT C2
/user/trafodion/lobs LOBP_00452177305498070471_0001 15 15
+TRAFODION LOB130 TLOB130GT C3
/user/trafodion/lobs LOBP_00452177305498070471_0002 15 15
+TRAFODION LOB130 TLOB130GT C4
/user/trafodion/lobs LOBP_00452177305498070471_0003 45 45
--- 3 row(s) selected.
>>delete from tlob130gt where c1=2;
@@ -1150,18 +1134,18 @@ Lob Information for table: "TRAFODION".LOB130.TLOB130GT
=========================
ColumnName : C2
- Lob Location : /lobs
- LOB Data File: LOBP_00183086508643300979_0001
+ Lob Location : /user/trafodion/lobs
+ LOB Data File: LOBP_00452177305498070471_0001
LOB EOD : 30
LOB Used Len : 25
ColumnName : C3
- Lob Location : /lobs
- LOB Data File: LOBP_00183086508643300979_0002
+ Lob Location : /user/trafodion/lobs
+ LOB Data File: LOBP_00452177305498070471_0002
LOB EOD : 31
LOB Used Len : 26
ColumnName : C4
- Lob Location : /lobs
- LOB Data File: LOBP_00183086508643300979_0003
+ Lob Location : /user/trafodion/lobs
+ LOB Data File: LOBP_00452177305498070471_0003
LOB EOD : 71
LOB Used Len : 56
@@ -1171,9 +1155,9 @@ Lob Information for table: "TRAFODION".LOB130.TLOB130GT
CATALOG_NAME SCHEMA_NAME OBJECT_NAME COLUMN_NAME
LOB_LOCATION LOB_DATA_FILE LOB_DATA_FILE_SIZE_EOD LOB_DATA_FILE_SIZE_USED
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
--------------------------------- ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- ---------------------- -----------------------
-TRAFODION LOB130 TLOB130GT C2
/lobs LOBP_00183086508643300979_0001 30 25
-TRAFODION LOB130 TLOB130GT C3
/lobs LOBP_00183086508643300979_0002 31 26
-TRAFODION LOB130 TLOB130GT C4
/lobs LOBP_00183086508643300979_0003 71 56
+TRAFODION LOB130 TLOB130GT C2
/user/trafodion/lobs LOBP_00452177305498070471_0001 30 25
+TRAFODION LOB130 TLOB130GT C3
/user/trafodion/lobs LOBP_00452177305498070471_0002 31 26
+TRAFODION LOB130 TLOB130GT C4
/user/trafodion/lobs LOBP_00452177305498070471_0003 71 56
--- 3 row(s) selected.
>>
@@ -1182,49 +1166,52 @@ TRAFODION
>>create table tlob130ext (c1 int not null, c2 blob, c3 clob, c4 blob storage 'external', primary key (c1));
--- SQL operation complete.
+>>
>>create table tlob130ext2 (c1 int not null, c2 blob, c3 clob, c4 blob storage 'external', primary key (c1));
--- SQL operation complete.
>>create table tlob130_not_external (c1 int not null , c2 blob, c3 blob, c4 blob, primary key (c1));
--- SQL operation complete.
->>insert into tlob130ext values(1, stringtolob('first lob'), filetolob('hdfs:///lobs/lob_input_a1.txt'),externaltolob('hdfs:///lobs/lob_input_a1.txt'));
+>>insert into tlob130ext values(1, stringtolob('first lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'));
--- 1 row(s) inserted.
->>insert into tlob130ext values(2, stringtolob('second lob'), filetolob('hdfs:///lobs/lob_input_b1.txt'),externaltolob('hdfs:///lobs/lob_input_b1.txt'));
+>>insert into tlob130ext values(2, stringtolob('second lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_b1.txt'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_b1.txt'));
--- 1 row(s) inserted.
->>insert into tlob130ext values(3, stringtolob('third lob'), filetolob('hdfs:///lobs/lob_input_c1.txt'),externaltolob('hdfs:///lobs/lob_input_c1.txt'));
+>>insert into tlob130ext values(3, stringtolob('third lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_c1.txt'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_c1.txt'));
--- 1 row(s) inserted.
+>>
>>--negative cases
->>insert into tlob130ext values(2, externaltolob('first lob'), filetolob('hdfs:///lobs/lob_input_a1.txt'),externaltolob('hdfs:///lobs/lob_input_a1.txt'));
+>>insert into tlob130ext values(2, externaltolob('first lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'));
*** ERROR[1432] Input LOB type 8 does not match column's storage type: 2 Column name: C2 .
*** ERROR[8822] The statement was not prepared.
->>insert into tlob130ext values(3, stringtolob('first lob'), filetolob('hdfs:///lobs/lob_input_a1.txt'),filetolob('hdfs:///lobs/lob_input_a1.txt'));
+>>insert into tlob130ext values(3, stringtolob('first lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'),filetolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'));
*** ERROR[1432] Input LOB type 2 does not match column's storage type: 8 Column name: C4 .
*** ERROR[8822] The statement was not prepared.
->>update tlob130ext set c4=stringtolob('hdfs:///lobs/lob_input_a1.txt', append) where c1=1;
+>>update tlob130ext set c4=stringtolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt', append) where c1=1;
*** ERROR[1432] Input LOB type 2 does not match column's storage type: 8 Column name: C4 .
*** ERROR[8822] The statement was not prepared.
->>update tlob130ext set c4=externaltolob('hdfs:///lobs/lob_input_a1.txt', append) where c1=1;
+>>update tlob130ext set c4=externaltolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt', append) where c1=1;
*** ERROR[15001] A syntax error occurred at or before:
-update tlob130ext set c4=externaltolob('hdfs:///lobs/lob_input_a1.txt', append)
- ^ (79 characters from start of SQL statement)
+update tlob130ext set c4=externaltolob('hdfs:///user/trafodion/lobs/lob_input_a
+1.txt', append) where c1=1;
+ ^ (94 characters from start of SQL statement)
*** ERROR[8822] The statement was not prepared.
->>update tlob130ext set c3=externaltolob('hdfs:///lobs/lob_input_b1.txt') where c1=1;
+>>update tlob130ext set c3=externaltolob('hdfs:///user/trafodion/lobs/lob_input_b1.txt') where c1=1;
*** ERROR[1432] Input LOB type 8 does not match column's storage type: 2 Column name: C3 .
@@ -1239,18 +1226,18 @@ update tlob130ext set c4=externaltolob('hdfs:///lobs/lob_input_a1.txt', append)
>>delete from tlob130ext where c1=1;
--- 1 row(s) deleted.
->>insert into tlob130ext values(1, stringtolob('first lob'),externaltolob('hdfs:///lobs/lob_input_a1.txt'),externaltolob('hdfs:///lobs/lob_input_a1.txt'));
+>>insert into tlob130ext values(1, stringtolob('first lob'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'));
*** ERROR[1432] Input LOB type 8 does not match column's storage type: 2 Column name: C3 .
*** ERROR[8822] The statement was not prepared.
->>insert into tlob130ext values(1, stringtolob('first lob'), filetolob('hdfs:///lobs/lob_input_a1.txt'),externaltolob('hdfs:///lobs/lob_input_a1.txt'));
+>>insert into tlob130ext values(1, stringtolob('first lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'),externaltolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'));
--- 1 row(s) inserted.
>>-- the next one should see error 8557
->>insert into tlob130ext values(1, stringtolob('first lob'), filetolob('hdfs:///lobs/lob_input_a1.txt'),
-+>externaltolob('hdfs:///lobs/reallyLongDirectoryName0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789/lob_input_a1012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789.txt'));
+>>insert into tlob130ext values(1, stringtolob('first lob'), filetolob('hdfs:///user/trafodion/lobs/lob_input_a1.txt'),
++>externaltolob('hdfs:///user/trafodion/lobs/reallyLongDirectoryName0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789/lob_input_a1012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789.txt'));
*** ERROR[8557] The file name passed to externaltolob exceeds 256 bytes.
@@ -1268,7 +1255,8 @@ second lob The cat and the fiddle,
The cat and the fiddle,
third lob The cow jumped over the moon.
- The cow jumped over the moon.
+ The cow jumped over the moon.
+
--- 3 row(s) selected.
>>update tlob130ext set c3=stringtolob('can allow this') where c1=1;
@@ -1285,13 +1273,14 @@ second lob The cat and the fiddle,
The cat and the fiddle,
third lob The cow jumped over the moon.
- The cow jumped over the moon.
+ The cow jumped over the moon.
+
--- 3 row(s) selected.
>>
>>
>>
->>update tlob130ext set c4=externaltolob('hdfs:///lobs/lob_input_d1.txt') where c1=2;
+>>update tlob130ext set c4=externaltolob('hdfs:///user/trafodion/lobs/lob_input_d1.txt') where c1=2;
--- 1 row(s) updated.
>>select lobtostring(c2,50),lobtostring(c3,50),lobtostring(c4,50) from tlob130ext;
@@ -1307,12 +1296,13 @@ To see such sport,
And th
third lob The cow jumped over the moon.
- The cow jumped over the moon.
+ The cow jumped over the moon.
+
--- 3 row(s) selected.
>>
>>
->>update tlob130ext set c2=filetolob('hdfs:///lobs/lob_input_b1.txt') where c1=2;
+>>update tlob130ext set c2=filetolob('hdfs:///user/trafodion/lobs/lob_input_b1.txt') where c1=2;
--- 1 row(s) updated.
>>select lobtostring(c2,50),lobtostring(c3,50),lobtostring(c3,50) from tlob130ext;
@@ -1337,13 +1327,13 @@ Lob Information for table: "TRAFODION".LOB130.TLOB130EXT
=========================
ColumnName : C2
- Lob Location : /lobs
- LOB Data File: LOBP_00183086508643314774_0001
+ Lob Location : /user/trafodion/lobs
+ LOB Data File: LOBP_00452177305498099204_0001
LOB EOD : 70
LOB Used Len : 42
ColumnName : C3
- Lob Location : /lobs
- LOB Data File: LOBP_00183086508643314774_0002
+ Lob Location : /user/trafodion/lobs
+ LOB Data File: LOBP_00452177305498099204_0002
LOB EOD : 125
LOB Used Len : 68
ColumnName : C4
@@ -1358,8 +1348,8 @@ Lob Information for table: "TRAFODION".LOB130.TLOB130EXT
CATALOG_NAME SCHEMA_NAME OBJECT_NAME COLUMN_NAME
LOB_LOCATION LOB_DATA_FILE LOB_DATA_FILE_SIZE_EOD LOB_DATA_FILE_SIZE_USED
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
--------------------------------- ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- ---------------------- -----------------------
-TRAFODION LOB130 TLOB130EXT C2
/lobs LOBP_00183086508643314774_0001 70 42
-TRAFODION LOB130 TLOB130EXT C3
/lobs LOBP_00183086508643314774_0002 125 68
+TRAFODION LOB130 TLOB130EXT C2
/user/trafodion/lobs LOBP_00452177305498099204_0001 70 42
+TRAFODION LOB130 TLOB130EXT C3
/user/trafodion/lobs LOBP_00452177305498099204_0002 125 68
TRAFODION LOB130
<TRUNCATED>