Posted to commits@hawq.apache.org by nh...@apache.org on 2015/12/01 18:28:05 UTC

[5/7] incubator-hawq git commit: HAWQ-185. Remove unused PXF regression tests

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7eeeec9d/src/test/regress/input/pxf_hdfs_writable.source
----------------------------------------------------------------------
diff --git a/src/test/regress/input/pxf_hdfs_writable.source b/src/test/regress/input/pxf_hdfs_writable.source
deleted file mode 100644
index b87245c..0000000
--- a/src/test/regress/input/pxf_hdfs_writable.source
+++ /dev/null
@@ -1,811 +0,0 @@
---
--- PXF writable regression suite 
---
--- Prerequisites:
---
---   Must have a running hdfs with REST service on port 50070
---   Must have HADOOP_ROOT set.
-
--- start_matchsubs
---
--- # create a match/subs expression to handle ip addresses that change
---
--- m/(ERROR|WARNING):.*remote component error.*\(\d+\).*from.*'\d+\.\d+\.\d+\.\d+:\d+'.*/
--- s/'\d+\.\d+\.\d+\.\d+:\d+'/'SOME_IP:SOME_PORT'/
---
--- m/(remote component error \(0\): Failed connect to @hostname@:12345; Connection refused|remote component error \(0\): couldn't connect to host).*/
--- s/(Failed connect to @hostname@:12345; Connection refused|couldn't connect to host)/CURL_CON_ERROR/
---
--- end_matchsubs
--- start_matchignore
---
--- m/.*Unable to load native-hadoop library for your platform.*/
---
--- end_matchignore
-
---------------------------------------------------------------------------------
--- WRITABLE
---------------------------------------------------------------------------------
---
--- 0. syntax validations
---
-CREATE WRITABLE EXTERNAL TABLE pxf_out(a int, b text, c bytea)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data?ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.AvroResolver&DATA-SCHEMA=MySchema')
-FORMAT 'CUSTOM' (formatter='pxfwritable_import'); -- positive
-
-CREATE WRITABLE EXTERNAL TABLE pxf_out1(a int, b text, c bytea)
-LOCATION ('pxf://@hostname@:50070/somepath/gpdb_regression_data?someuseropt=someuserval')
-FORMAT 'CUSTOM' (formatter='pxfwritable_import'); -- negative
-
-CREATE WRITABLE EXTERNAL TABLE pxf_out2(a int, b text, c bytea)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/*')
-FORMAT 'CUSTOM' (formatter='pxfwritable_import'); -- negative
-
-DROP EXTERNAL TABLE pxf_out;
-
---
--- Load HDFS with test data
---
-\! ${HADOOP_ROOT}/bin/hdfs dfs -mkdir /gpdb_regression_data
---
--- 1. Test writable table with TEXT format 
---
-CREATE WRITABLE EXTERNAL TABLE wrtext (s1 text, 
-                               		   n1 int, 
-							   		   n2 int)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrtext?PROFILE=HdfsTextSimple')
-FORMAT 'TEXT' (DELIMITER ',');
-INSERT INTO wrtext VALUES ('first',1,11), 
-                          ('second',2,22);
-INSERT INTO wrtext VALUES ('third',3,33),
-                          ('fourth',4,44);
-INSERT INTO wrtext VALUES ('fifth',5,55),
-                          ('sixth',6,66);
-INSERT INTO wrtext VALUES ('seventh',7,77),
-                          ('eighth',8,88);
-INSERT INTO wrtext VALUES ('ninth',9,99),
-                          ('tenth',10,1010);
-INSERT INTO wrtext VALUES ('eleventh',11,1111),
-                          ('twelfth',12,1212);
-INSERT INTO wrtext VALUES ('thirteenth',13,1313),
-                          ('fourteenth',14,1414), 
-                          ('fifteenth',15,1515);
---
--- Test data was written
---
-\! ${HADOOP_ROOT}/bin/hdfs dfs -ls /gpdb_regression_data/writable/wrtext | grep Found | awk '{ if ($2>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du -s /gpdb_regression_data/writable/wrtext | awk '{ if ($1>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du /gpdb_regression_data/writable/wrtext | awk '{if ($1<=0) print $0,": error";}'
---
--- Read data
---
-CREATE READABLE EXTERNAL TABLE readtext (s1 text, 
-                               		     n1 int, 
-							   		     n2 int)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrtext?PROFILE=HdfsTextSimple')
-FORMAT 'TEXT' (DELIMITER ',');
-SELECT * FROM readtext ORDER BY n1;
---
--- 1.1 Insert into writable table from read table
---
-INSERT INTO wrtext SELECT * FROM readtext WHERE n1<=10;
-INSERT INTO wrtext SELECT * FROM readtext WHERE n2<100;
-SELECT * FROM readtext ORDER BY n1;
---
--- Cleanup
---
-DROP EXTERNAL TABLE wrtext;
-DROP EXTERNAL TABLE readtext;
---
--- 2. Test writable table with TEXT format - compressions
---
---
--- 2.1 Gzip compression
---
-CREATE WRITABLE EXTERNAL TABLE wrgzip (s1 text, 
-                               		   n1 int, 
-							   		   n2 int)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrgzip?ACCESSOR=com.pivotal.pxf.plugins.hdfs.LineBreakAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.StringPassResolver&COMPRESSION_CODEC=org.apache.hadoop.io.compress.GzipCodec')
-FORMAT 'TEXT' (DELIMITER ',');
-COPY wrgzip FROM STDIN CSV;
-I,1,11
-am,2,22
-Gzipped,3,33
-I,1,11
-am,2,22
-Gzipped,3,33
-I,1,11
-am,2,22
-Gzipped,3,33
-I,1,11
-am,2,22
-Gzipped,3,33
-I,1,11
-am,2,22
-Gzipped,3,33
-I,1,11
-am,2,22
-Gzipped,3,33
-\.
-COPY wrgzip FROM STDIN DELIMITER ':';
-OOF:4:44
-OOF:5:55
-\.
-DROP EXTERNAL TABLE wrgzip;
---
--- Test data was written
---
-\! ${HADOOP_ROOT}/bin/hdfs dfs -ls /gpdb_regression_data/writable/wrgzip | grep Found | awk '{ if ($2>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du -s /gpdb_regression_data/writable/wrgzip | awk '{ if ($1>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du /gpdb_regression_data/writable/wrgzip | awk '{if ($1<=0) print $0,": error";}'
---
--- Read data
---
-CREATE READABLE EXTERNAL TABLE readgzip (s1 text, 
-                               		     n1 int, 
-							   		     n2 int)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrgzip?PROFILE=HdfsTextSimple')
-FORMAT 'TEXT' (DELIMITER ',');
-SELECT * FROM readgzip ORDER BY n1;
-DROP EXTERNAL TABLE readgzip;
-
---
--- 2.2 Default compression (.deflate)
---
-CREATE WRITABLE EXTERNAL TABLE wrdefault (s1 text, 
-                               		      n1 int, 
-							   		      n2 int)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrdefault?ACCESSOR=com.pivotal.pxf.plugins.hdfs.LineBreakAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.StringPassResolver&COMPRESSION_CODEC=org.apache.hadoop.io.compress.DefaultCodec')
-FORMAT 'TEXT' (DELIMITER ',');
-INSERT INTO wrdefault VALUES ('I',1,11), ('am',2,22), ('deflated',3,33);
-DROP EXTERNAL TABLE wrdefault;
---
--- Test data was written
---
-\! ${HADOOP_ROOT}/bin/hdfs dfs -ls /gpdb_regression_data/writable/wrdefault | grep Found | awk '{ if ($2>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du -s /gpdb_regression_data/writable/wrdefault | awk '{ if ($1>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du /gpdb_regression_data/writable/wrdefault | awk '{if ($1<=0) print $0,": error";}'
---
--- Read data
---
-CREATE READABLE EXTERNAL TABLE readdefault (s1 text, 
-                               		        n1 int, 
-							   		        n2 int)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrdefault?PROFILE=HdfsTextSimple')
-FORMAT 'TEXT' (DELIMITER ',');
-SELECT * FROM readdefault ORDER BY n1;
-DROP EXTERNAL TABLE readdefault;
-
---
--- 2.3 BZip2 compression
---
-CREATE WRITABLE EXTERNAL TABLE wrbzip2 (s1 text, 
-                               		    n1 int, 
-							   		    n2 int)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrbzip2?PROFILE=HdfsTextSimple&COMPRESSION_CODEC=org.apache.hadoop.io.compress.BZip2Codec')
-FORMAT 'TEXT' (DELIMITER ',');
-CREATE TEMP TABLE data_for_bzip2 (s1 text,
-                                  n1 int,
-                                  n2 int);
-INSERT INTO data_for_bzip2 VALUES ('I',1,10),
-                                  (' ',2,20), 
-                                  ('a',3,30),
-                                  ('m',4,40),
-                                  (' ',5,50),
-                                  ('B',6,60),
-                                  ('Z',7,70),
-                                  ('i',8,80),
-                                  ('p',9,90),
-                                  ('2',10,100), 
-                                  ('!!!',11,110);
-INSERT INTO wrbzip2 SELECT * FROM data_for_bzip2;
-DROP TABLE data_for_bzip2;
-DROP EXTERNAL TABLE wrbzip2;
---
--- Test data was written
---
-\! ${HADOOP_ROOT}/bin/hdfs dfs -ls /gpdb_regression_data/writable/wrbzip2 | grep Found | awk '{ if ($2>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du -s /gpdb_regression_data/writable/wrbzip2 | awk '{ if ($1>0) print "ok";}'
---
--- Read data
---
-CREATE READABLE EXTERNAL TABLE readbzip2 (s1 text, 
-                               		      n1 int, 
-							   		      n2 int)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrbzip2?PROFILE=HdfsTextSimple')
-FORMAT 'TEXT' (DELIMITER ',');
-SELECT * FROM readbzip2 ORDER BY n1;
-
-DROP EXTERNAL TABLE readbzip2;
-
---
--- 3. Test writable table with custom format into a sequence file
---
-
---
--- 3.0 Test sequence file - no compression
---
-CREATE WRITABLE EXTERNAL TABLE wrseq (tmp1  timestamp, 
-                                      num1  integer, 
-                                      num2  integer, 
-                                      num3  integer, 
-                                      num4  integer,
-                                      t1    text, 
-                                      t2    text, 
-                                      t3    text, 
-                                      t4    text, 
-                                      t5    text, 
-                                      t6    text, 
-                                      dub1  double precision, 
-                                      dub2  double precision, 
-                                      dub3  double precision, 
-                                      ft1   real, 
-                                      ft2   real, 
-                                      ft3   real, 
-                                      ln1   bigint, 
-                                      ln2   bigint, 
-                                      ln3   bigint, 
-                                      bt    bytea,
-                                      bool1 boolean,
-							          bool2 boolean,
-                                      bool3 boolean)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrcustom?ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable')
-FORMAT 'custom' (formatter='pxfwritable_export');
-COPY wrseq FROM '@abs_srcdir@/data/pxf/customwritable_data.txt';
-DROP EXTERNAL TABLE wrseq;
---
--- Test data was written
---
-\! ${HADOOP_ROOT}/bin/hdfs dfs -ls /gpdb_regression_data/writable/wrcustom | grep Found | awk '{ if ($2>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du -s /gpdb_regression_data/writable/wrcustom | awk '{ if ($1>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du /gpdb_regression_data/writable/wrcustom | awk '{if ($1<=0) print $0,": error";}'
---
--- Read data
---
-CREATE READABLE EXTERNAL TABLE readseq (tmp1  timestamp, 
-                                        num1  integer, 
-                                        num2  integer, 
-                                        num3  integer, 
-                                        num4  integer,
-                                        t1    text, 
-                                        t2    text, 
-                                        t3    text, 
-                                        t4    text, 
-                                        t5    text, 
-                                        t6    text, 
-                                        dub1  double precision, 
-                                        dub2  double precision, 
-                                        dub3  double precision, 
-                                        ft1   real, 
-                                        ft2   real, 
-                                        ft3   real, 
-                                        ln1   bigint, 
-                                        ln2   bigint, 
-                                        ln3   bigint, 
-                                        bt    bytea,
-                                        bool1 boolean,
-							            bool2 boolean,
-                                        bool3 boolean)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrcustom?FRAGMENTER=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable')
-FORMAT 'custom' (formatter='pxfwritable_import');
-SELECT * FROM readseq ORDER BY num1;
-DROP EXTERNAL TABLE readseq;
-
--- start_ignore
---
--- 3.1 Test sequence file compression - gzip codec, no compression type
---
-CREATE WRITABLE EXTERNAL TABLE wrseqgzip (tmp1  timestamp, 
-                                          num1  integer, 
-                                          num2  integer, 
-                                          num3  integer, 
-                                          num4  integer,
-                                          t1    text, 
-                                          t2    text, 
-                                          t3    text, 
-                                          t4    text, 
-                                          t5    text, 
-                                          t6    text, 
-                                          dub1  double precision, 
-                                          dub2  double precision, 
-                                          dub3  double precision, 
-                                          ft1   real, 
-                                          ft2   real, 
-                                          ft3   real, 
-                                          ln1   bigint, 
-                                          ln2   bigint, 
-                                          ln3   bigint, 
-                                          bt    bytea,
-                                          bool1 boolean,
-							              bool2 boolean,
-                                          bool3 boolean)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrseqgzip?ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable&COMPRESSION_CODEC=org.apache.hadoop.io.compress.GzipCodec')
-FORMAT 'custom' (formatter='pxfwritable_export');
-COPY wrseqgzip FROM '@abs_srcdir@/data/pxf/customwritable_data.txt';
---
--- Test data was written
---
-\! ${HADOOP_ROOT}/bin/hdfs dfs -ls /gpdb_regression_data/writable/wrseqgzip | grep Found | awk '{ if ($2>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du -s /gpdb_regression_data/writable/wrseqgzip | awk '{ if ($1>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du /gpdb_regression_data/writable/wrseqgzip | awk '{if ($1<=0) print $0,": error";}'
---
--- Read data
---
-CREATE READABLE EXTERNAL TABLE readseqgzip (LIKE wrseqgzip)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrseqgzip?FRAGMENTER=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable')
-FORMAT 'custom' (formatter='pxfwritable_import');
-SELECT * FROM readseqgzip ORDER BY num1;
-DROP EXTERNAL TABLE readseqgzip;
-DROP EXTERNAL TABLE wrseqgzip;
-
---
--- 3.2 Test sequence file compression - gzip codec, record compression
---
-CREATE WRITABLE EXTERNAL TABLE wrseqgziprecord (tmp1  timestamp, 
-                                                num1  integer, 
-                                                num2  integer, 
-                                                num3  integer, 
-                                                num4  integer,
-                                                t1    text, 
-                                                t2    text, 
-                                                t3    text, 
-                                                t4    text, 
-                                                t5    text, 
-                                                t6    text, 
-                                                dub1  double precision, 
-                                                dub2  double precision, 
-                                                dub3  double precision, 
-                                                ft1   real, 
-                                                ft2   real, 
-                                                ft3   real, 
-                                                ln1   bigint, 
-                                                ln2   bigint, 
-                                                ln3   bigint, 
-                                                bt    bytea,
-                                                bool1 boolean,
-							                    bool2 boolean,
-                                                bool3 boolean)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrseqgziprecord?ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable&COMPRESSION_CODEC=org.apache.hadoop.io.compress.GzipCodec&COMPRESSION_TYPE=RECORD')
-FORMAT 'custom' (formatter='pxfwritable_export');
-COPY wrseqgziprecord FROM '@abs_srcdir@/data/pxf/customwritable_data.txt';
---
--- Test data was written
---
-\! ${HADOOP_ROOT}/bin/hdfs dfs -ls /gpdb_regression_data/writable/wrseqgziprecord | grep Found | awk '{ if ($2>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du -s /gpdb_regression_data/writable/wrseqgziprecord | awk '{ if ($1>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du /gpdb_regression_data/writable/wrseqgziprecord | awk '{if ($1<=0) print $0,": error";}'
---
--- Read data
---
-CREATE READABLE EXTERNAL TABLE readseqgziprecord (LIKE wrseqgziprecord)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrseqgziprecord?FRAGMENTER=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable')
-FORMAT 'custom' (formatter='pxfwritable_import');
-SELECT * FROM readseqgziprecord ORDER BY num1;
-DROP EXTERNAL TABLE readseqgziprecord;
-DROP EXTERNAL TABLE wrseqgziprecord;
-
---
--- 3.3 Test sequence file compression - gzip codec, block compression
---
-CREATE WRITABLE EXTERNAL TABLE wrseqgzipblock (tmp1  timestamp, 
-                                               num1  integer, 
-                                               num2  integer, 
-                                               num3  integer, 
-                                               num4  integer,
-                                               t1    text, 
-                                               t2    text, 
-                                               t3    text, 
-                                               t4    text, 
-                                               t5    text, 
-                                               t6    text, 
-                                               dub1  double precision, 
-                                               dub2  double precision, 
-                                               dub3  double precision, 
-                                               ft1   real, 
-                                               ft2   real, 
-                                               ft3   real, 
-                                               ln1   bigint, 
-                                               ln2   bigint, 
-                                               ln3   bigint, 
-                                               bt    bytea,
-                                               bool1 boolean,
-							                   bool2 boolean,
-                                               bool3 boolean)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrseqgzipblock?ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable&COMPRESSION_CODEC=org.apache.hadoop.io.compress.GzipCodec&COMPRESSION_TYPE=BLOCK')
-FORMAT 'custom' (formatter='pxfwritable_export');
-COPY wrseqgzipblock FROM '@abs_srcdir@/data/pxf/customwritable_data.txt';
---
--- Test data was written
---
-\! ${HADOOP_ROOT}/bin/hdfs dfs -ls /gpdb_regression_data/writable/wrseqgzipblock | grep Found | awk '{ if ($2>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du -s /gpdb_regression_data/writable/wrseqgzipblock | awk '{ if ($1>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du /gpdb_regression_data/writable/wrseqgzipblock | awk '{if ($1<=0) print $0,": error";}'
---
--- Read data
---
-CREATE READABLE EXTERNAL TABLE readseqgzipblock (LIKE wrseqgzipblock)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrseqgzipblock?FRAGMENTER=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable')
-FORMAT 'custom' (formatter='pxfwritable_import');
-SELECT * FROM readseqgzipblock ORDER BY num1;
-DROP EXTERNAL TABLE readseqgzipblock;
-DROP EXTERNAL TABLE wrseqgzipblock;
--- end_ignore
-
---
--- 3.4 Test sequence file compression - deflate codec, no compression type
---
-CREATE WRITABLE EXTERNAL TABLE wrseqdeflate (tmp1  timestamp, 
-                                             num1  integer, 
-                                             num2  integer, 
-                                             num3  integer, 
-                                             num4  integer,
-                                             t1    text, 
-                                             t2    text, 
-                                             t3    text, 
-                                             t4    text, 
-                                             t5    text, 
-                                             t6    text, 
-                                             dub1  double precision, 
-                                             dub2  double precision, 
-                                             dub3  double precision, 
-                                             ft1   real, 
-                                             ft2   real, 
-                                             ft3   real, 
-                                             ln1   bigint, 
-                                             ln2   bigint, 
-                                             ln3   bigint, 
-                                             bt    bytea,
-                                             bool1 boolean,
-							                 bool2 boolean,
-                                             bool3 boolean)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrseqdeflate?ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable&COMPRESSION_CODEC=org.apache.hadoop.io.compress.DefaultCodec')
-FORMAT 'custom' (formatter='pxfwritable_export');
-COPY wrseqdeflate FROM '@abs_srcdir@/data/pxf/customwritable_data.txt';
---
--- Test data was written
---
-\! ${HADOOP_ROOT}/bin/hdfs dfs -ls /gpdb_regression_data/writable/wrseqdeflate | grep Found | awk '{ if ($2>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du -s /gpdb_regression_data/writable/wrseqdeflate | awk '{ if ($1>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du /gpdb_regression_data/writable/wrseqdeflate | awk '{if ($1<=0) print $0,": error";}'
---
--- Read data
---
-CREATE READABLE EXTERNAL TABLE readseqdeflate (LIKE wrseqdeflate)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrseqdeflate?FRAGMENTER=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable')
-FORMAT 'custom' (formatter='pxfwritable_import');
-SELECT * FROM readseqdeflate ORDER BY num1;
-DROP EXTERNAL TABLE readseqdeflate;
-DROP EXTERNAL TABLE wrseqdeflate;
-
---
--- 3.5 Test sequence file compression - deflate codec, record compression
---
-CREATE WRITABLE EXTERNAL TABLE wrseqdeflaterecord (tmp1  timestamp, 
-                                                   num1  integer, 
-                                                   num2  integer, 
-                                                   num3  integer, 
-                                                   num4  integer,
-                                                   t1    text, 
-                                                   t2    text, 
-                                                   t3    text, 
-                                                   t4    text, 
-                                                   t5    text, 
-                                                   t6    text, 
-                                                   dub1  double precision, 
-                                                   dub2  double precision, 
-                                                   dub3  double precision, 
-                                                   ft1   real, 
-                                                   ft2   real, 
-                                                   ft3   real, 
-                                                   ln1   bigint, 
-                                                   ln2   bigint, 
-                                                   ln3   bigint, 
-                                                   bt    bytea,
-                                                   bool1 boolean,
-							                       bool2 boolean,
-                                                   bool3 boolean)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrseqdeflaterecord?ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable&COMPRESSION_CODEC=org.apache.hadoop.io.compress.DefaultCodec&COMPRESSION_TYPE=RECORD')
-FORMAT 'custom' (formatter='pxfwritable_export');
-COPY wrseqdeflaterecord FROM '@abs_srcdir@/data/pxf/customwritable_data.txt';
---
--- Test data was written
---
-\! ${HADOOP_ROOT}/bin/hdfs dfs -ls /gpdb_regression_data/writable/wrseqdeflaterecord | grep Found | awk '{ if ($2>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du -s /gpdb_regression_data/writable/wrseqdeflaterecord | awk '{ if ($1>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du /gpdb_regression_data/writable/wrseqdeflaterecord | awk '{if ($1<=0) print $0,": error";}'
---
--- Read data
---
-CREATE READABLE EXTERNAL TABLE readseqdeflaterecord (LIKE wrseqdeflaterecord)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrseqdeflaterecord?FRAGMENTER=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable')
-FORMAT 'custom' (formatter='pxfwritable_import');
-SELECT * FROM readseqdeflaterecord ORDER BY num1;
-DROP EXTERNAL TABLE readseqdeflaterecord;
-DROP EXTERNAL TABLE wrseqdeflaterecord;
-
---
--- 3.6 Test sequence file compression - deflate codec, block compression
---
-CREATE WRITABLE EXTERNAL TABLE wrseqdeflateblock (tmp1  timestamp, 
-                                                  num1  integer, 
-                                                  num2  integer, 
-                                                  num3  integer, 
-                                                  num4  integer,
-                                                  t1    text, 
-                                                  t2    text, 
-                                                  t3    text, 
-                                                  t4    text, 
-                                                  t5    text, 
-                                                  t6    text, 
-                                                  dub1  double precision, 
-                                                  dub2  double precision, 
-                                                  dub3  double precision, 
-                                                  ft1   real, 
-                                                  ft2   real, 
-                                                  ft3   real, 
-                                                  ln1   bigint, 
-                                                  ln2   bigint, 
-                                                  ln3   bigint, 
-                                                  bt    bytea,
-                                                  bool1 boolean,
-							                      bool2 boolean,
-                                                  bool3 boolean)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrseqdeflateblock?ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable&COMPRESSION_CODEC=org.apache.hadoop.io.compress.DefaultCodec&COMPRESSION_TYPE=BLOCK')
-FORMAT 'custom' (formatter='pxfwritable_export');
-COPY wrseqdeflateblock FROM '@abs_srcdir@/data/pxf/customwritable_data.txt';
---
--- Test data was written
---
-\! ${HADOOP_ROOT}/bin/hdfs dfs -ls /gpdb_regression_data/writable/wrseqdeflateblock | grep Found | awk '{ if ($2>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du -s /gpdb_regression_data/writable/wrseqdeflateblock | awk '{ if ($1>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du /gpdb_regression_data/writable/wrseqdeflateblock | awk '{if ($1<=0) print $0,": error";}'
---
--- Read data
---
-CREATE READABLE EXTERNAL TABLE readseqdeflateblock (LIKE wrseqdeflateblock)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/wrseqdeflateblock?FRAGMENTER=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable')
-FORMAT 'custom' (formatter='pxfwritable_import');
-SELECT * FROM readseqdeflateblock ORDER BY num1;
-DROP EXTERNAL TABLE readseqdeflateblock;
-DROP EXTERNAL TABLE wrseqdeflateblock;
-
---
--- 4. Test error in port -- negative
---
-CREATE WRITABLE EXTERNAL TABLE wr_port_err(t1 text,
-                                           a1 integer)
-LOCATION ('pxf://@hostname@:12345/gpdb_regression_data/writable/err?ACCESSOR=com.pivotal.pxf.plugins.hdfs.accessors.TextFileWAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.resolvers.TextWResolver')
-FORMAT 'TEXT' (DELIMITER ',');
-INSERT INTO wr_port_err VALUES ('first',1), ('second',2), ('third',3);
-DROP EXTERNAL TABLE wr_port_err;
-
---
--- 5. Test error in host -- negative
---
-CREATE WRITABLE EXTERNAL TABLE wr_host_err(t1 text,
-                                           a1 integer)
-LOCATION ('pxf://badhostname:50070/gpdb_regression_data/writable/err?ACCESSOR=com.pivotal.pxf.plugins.hdfs.accessors.TextFileWAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.resolvers.TextWResolver')
-FORMAT 'TEXT' (DELIMITER ',');
-INSERT INTO wr_host_err VALUES ('first',1), ('second',2), ('third',3);
-DROP EXTERNAL TABLE wr_host_err;
-
---
--- 6. Test circle type converted to text and back
---
-CREATE WRITABLE EXTERNAL TABLE wr_circle(a1 integer,
-                                         c1 circle)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/circle?ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritableWithCircle')
-FORMAT 'custom' (formatter='pxfwritable_export');
-INSERT INTO wr_circle VALUES (1, '<(3,3),9>'), (2, '<(4,4),16>');
-DROP EXTERNAL TABLE wr_circle;
---
--- Test data was written
---
-CREATE EXTERNAL TABLE read_circle(a1 integer,
-                                           c1 circle)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/circle?FRAGMENTER=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritableWithCircle')
-FORMAT 'custom' (formatter='pxfwritable_import');
-SELECT * FROM read_circle ORDER BY a1;
-DROP EXTERNAL TABLE read_circle;
-
---
--- 7. Test unsupported type in writable resolver -- negative
---
-CREATE WRITABLE EXTERNAL TABLE wr_char_err(a1 integer,
-                                           c1 char)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/err?ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritableWithChar')
-FORMAT 'custom' (formatter='pxfwritable_export');
-INSERT INTO wr_char_err VALUES (100, 'a'), (1000, 'b');
-DROP EXTERNAL TABLE wr_char_err;
-
---
--- 8. Test ANALYZE on writable table
---
-SET pxf_enable_stat_collection = true;
-CREATE WRITABLE EXTERNAL TABLE writable_analyze_no_analyzer(s1 text, 
-                               		                        n1 int, 
-							   		                        n2 int)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/analyze/noanalyzer?ACCESSOR=com.pivotal.pxf.plugins.hdfs.LineBreakAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.StringPassResolver')
-FORMAT 'TEXT';
-ANALYZE writable_analyze_no_analyzer;
-SELECT COUNT(*) FROM pg_class WHERE relname = 'writable_analyze_no_analyzer' AND relpages = 1 AND reltuples = 0;
-INSERT INTO writable_analyze_no_analyzer VALUES ('nothing', 0, 0), ('will', 0, 0), ('happen', 0, 0);
-SELECT COUNT(*) FROM pg_class WHERE relname = 'writable_analyze_no_analyzer' AND relpages = 1 AND reltuples = 0;
-
-CREATE WRITABLE EXTERNAL TABLE writable_analyze_with_analyzer(s1 text, 
-                               		                          n1 int, 
-							   		                          n2 int)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/analyze/analyzer?PROFILE=HdfsTextSimple')
-FORMAT 'TEXT';
-ANALYZE writable_analyze_with_analyzer;
-SELECT COUNT(*) FROM pg_class WHERE relname = 'writable_analyze_with_analyzer' AND relpages = 1 AND reltuples = 0;
-INSERT INTO writable_analyze_with_analyzer VALUES ('nothing', 0, 0), ('will', 0, 0), ('happen', 0, 0);
-SELECT COUNT(*) FROM pg_class WHERE relname = 'writable_analyze_with_analyzer' AND relpages = 1 AND reltuples = 0;
-
---
--- 9. Test COMPRESSION_TYPE = NONE -- negative 
---
-CREATE WRITABLE EXTERNAL TABLE compress_type_none(a1 integer,
-                                                  c1 char)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/err?ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=SomeClass&COMPRESSION_TYPE=NONE')
-FORMAT 'custom' (formatter='pxfwritable_export');
-INSERT INTO compress_type_none VALUES (100, 'a'), (1000, 'b');
-DROP EXTERNAL TABLE compress_type_none;
-
---
--- 10. Test CSV format 
---
-CREATE WRITABLE EXTERNAL TABLE writable_csv(number int,
-                                            name text,
-                                            position char)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/csv?ACCESSOR=com.pivotal.pxf.plugins.hdfs.LineBreakAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.StringPassResolver')
-FORMAT 'CSV';
-INSERT INTO writable_csv VALUES (4, 'H Waldman', 'P'),
-                                (6, 'Papi Turgeman', 'S'),
-                                (13, 'Radisav Ćurčić', 'C'),
-                                (11, 'Derrick Hamilton', 'F'),
-                                (15, 'Kenny Williams', 'F');
-DROP EXTERNAL TABLE writable_csv;
---
--- Test data was written
---
-CREATE EXTERNAL TABLE readable_csv(number int,
-                                   name text,
-                                   position char)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/csv?PROFILE=HdfsTextSimple')
-FORMAT 'CSV';
-SELECT * FROM readable_csv ORDER BY number;
-DROP EXTERNAL TABLE readable_csv;
-
---
--- 11. Test THREAD-SAFE parameter
---
-CREATE WRITABLE EXTERNAL TABLE writable_threadsafe(number int,
-                                            	   name text,
-                                            	   position char)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/thread?ACCESSOR=com.pivotal.pxf.plugins.hdfs.LineBreakAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.StringPassResolver&THREAD-SAFE=FALSE')
-FORMAT 'CSV';
-INSERT INTO writable_threadsafe VALUES (4, 'H Waldman', 'P'),
-                                       (6, 'Papi Turgeman', 'S'),
-                                       (13, 'Radisav Ćurčić', 'C'),
-                                       (11, 'Derrick Hamilton', 'F'),
-                                       (15, 'Kenny Williams', 'F');
-INSERT INTO writable_threadsafe VALUES (8, 'Doron Shefa', 'S'),
-                                       (13, 'Erez Katz', 'P');
-DROP EXTERNAL TABLE writable_threadsafe;
---
--- Test data was written
---
-CREATE EXTERNAL TABLE readable_threadsafe(number int,
-                                          name text,
-                                          position char)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/thread?FRAGMENTER=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&ACCESSOR=com.pivotal.pxf.plugins.hdfs.LineBreakAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.StringPassResolver&THREAD-SAFE=FALSE')
-FORMAT 'CSV';
-SELECT * FROM readable_threadsafe ORDER BY number, name;
-DROP EXTERNAL TABLE readable_threadsafe;
-
---
--- 12. Test recordkey for sequence file
---
-
---
--- 12.1 recordkey of type text
--- 
-CREATE WRITABLE EXTERNAL TABLE writable_recordkey_text(recordkey text,
-                                                       tmp1  timestamp, 
-                                                       num1  integer, 
-                                                       num2  integer, 
-                                                       num3  integer, 
-                                                       num4  integer,
-                                                       t1    text, 
-                                                       t2    text, 
-                                                       t3    text, 
-                                                       t4    text, 
-                                                       t5    text, 
-                                                       t6    text, 
-                                                       dub1  double precision, 
-                                                       dub2  double precision, 
-                                                       dub3  double precision, 
-                                                       ft1   real, 
-                                                       ft2   real, 
-                                                       ft3   real, 
-                                                       ln1   bigint, 
-                                                       ln2   bigint, 
-                                                       ln3   bigint, 
-                                                       bt    bytea,
-                                                       bool1 boolean,
-                                                       bool2 boolean,
-                                                       bool3 boolean)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/recordkey_text?ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable')
-FORMAT 'custom' (formatter='pxfwritable_export');
-COPY writable_recordkey_text FROM '@abs_srcdir@/data/pxf/customwritable_recordkey_data.txt';
---
--- Test data was written
---
-\! ${HADOOP_ROOT}/bin/hdfs dfs -ls /gpdb_regression_data/writable/recordkey_text | grep Found | awk '{ if ($2>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du -s /gpdb_regression_data/writable/recordkey_text | awk '{ if ($1>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du /gpdb_regression_data/writable/recordkey_text | awk '{if ($1<=0) print $0,": error";}'
---
--- Read data
---
-CREATE READABLE EXTERNAL TABLE read_recordkey_text (LIKE writable_recordkey_text)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/recordkey_text?FRAGMENTER=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable')
-FORMAT 'custom' (formatter='pxfwritable_import');
-SELECT * FROM read_recordkey_text ORDER BY num1 LIMIT 10;
-DROP EXTERNAL TABLE writable_recordkey_text;
-DROP EXTERNAL TABLE read_recordkey_text;
-
---
--- 12.2 recordkey of type int (one row with error)
--- 
-CREATE WRITABLE EXTERNAL TABLE writable_recordkey_int(recordkey int,
-                                                      tmp1  timestamp, 
-                                                      num1  integer, 
-                                                      num2  integer, 
-                                                      num3  integer, 
-                                                      num4  integer,
-                                                      t1    text, 
-                                                      t2    text, 
-                                                      t3    text, 
-                                                      t4    text, 
-                                                      t5    text, 
-                                                      t6    text, 
-                                                      dub1  double precision, 
-                                                      dub2  double precision, 
-                                                      dub3  double precision, 
-                                                      ft1   real, 
-                                                      ft2   real, 
-                                                      ft3   real, 
-                                                      ln1   bigint, 
-                                                      ln2   bigint, 
-                                                      ln3   bigint, 
-                                                      bt    bytea,
-                                                      bool1 boolean,
-			             		    		          bool2 boolean,
-                                                      bool3 boolean)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/recordkey_int?ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable')
-FORMAT 'custom' (formatter='pxfwritable_export');
-COPY writable_recordkey_int FROM '@abs_srcdir@/data/pxf/customwritable_recordkey_data.txt'
-SEGMENT REJECT LIMIT 5 ROWS;
---
--- Test data was written
---
-\! ${HADOOP_ROOT}/bin/hdfs dfs -ls /gpdb_regression_data/writable/recordkey_int | grep Found | awk '{ if ($2>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du -s /gpdb_regression_data/writable/recordkey_int | awk '{ if ($1>0) print "ok";}'
-\! ${HADOOP_ROOT}/bin/hdfs dfs -du /gpdb_regression_data/writable/recordkey_int | awk '{if ($1<=0) print $0,": error";}'
---
--- Read data
---
-CREATE READABLE EXTERNAL TABLE read_recordkey_int (LIKE writable_recordkey_int)
-LOCATION ('pxf://@hostname@:50070/gpdb_regression_data/writable/recordkey_int?FRAGMENTER=com.pivotal.pxf.plugins.hdfs.HdfsDataFragmenter&ACCESSOR=com.pivotal.pxf.plugins.hdfs.SequenceFileAccessor&RESOLVER=com.pivotal.pxf.plugins.hdfs.WritableResolver&DATA-SCHEMA=CustomWritable')
-FORMAT 'custom' (formatter='pxfwritable_import');
-SELECT * FROM read_recordkey_int ORDER BY num1;
-DROP EXTERNAL TABLE writable_recordkey_int;
-DROP EXTERNAL TABLE read_recordkey_int;
-
---
--- Cleanup: delete all data that was written into hdfs
---
--- start_ignore
-\! ${HADOOP_ROOT}/bin/hdfs dfs -rm -r /gpdb_regression_data
--- end_ignore

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7eeeec9d/src/test/regress/input/pxf_hive.source
----------------------------------------------------------------------
diff --git a/src/test/regress/input/pxf_hive.source b/src/test/regress/input/pxf_hive.source
deleted file mode 100644
index e7511cc..0000000
--- a/src/test/regress/input/pxf_hive.source
+++ /dev/null
@@ -1,251 +0,0 @@
---
--- PXF HIVE regression suite 
---
--- Prerequisites:
---
---   Must have a running hdfs with REST service on port 50070
---   Must have a running YARN service (to load data into Hive).
---   Must have HADOOP_ROOT, HBASE_ROOT, HIVE_ROOT and ZOOKEEPER_ROOT set.
---   PATH=${PATH}:${HADOOP_ROOT}/bin/
---
--- TODO: test gpdbwritable write/read when it is enabled.
--- TODO: test PB, AVRO, THRIFT when they are enabled (read only, with pre-formatted files).
--- TODO: test protocol validator for pxf once written.
--- TODO: test parameter passing, filter passing
--- start_matchsubs
---                                                                                               
--- # create a match/subs expression to handle ip addresses that change
---
--- m/(ERROR|WARNING):.*remote component error.*\(\d+\).*from.*'\d+\.\d+\.\d+\.\d+:\d+'.*/
--- s/'\d+\.\d+\.\d+\.\d+:\d+'/'SOME_IP:SOME_PORT'/
---
--- end_matchsubs
---------------------------------------------------------------------------------
--- HIVE
---------------------------------------------------------------------------------
---
--- 1. Testing Hive support for the primitive data types
-\! ${HIVE_ROOT}/bin/hive -e "create table hive_types (s1 string,s2 string,n1 int, d1 double, dc1 decimal, tm timestamp, f float, bg bigint, b boolean)row format delimited fields terminated by ','" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "load data local inpath '@abs_srcdir@/data/pxf/hive_types.txt' into table hive_types" 2>/dev/null
-
-CREATE EXTERNAL TABLE hawq_types(
-t1    text,
-t2    text,  
-num1  integer, 
-dub1  double precision,
-dec1  numeric,
-tm timestamp,
-r real,
-bg bigint,
-b boolean)
-LOCATION ('pxf://@hostname@:50070/hive_types?PROFILE=Hive')
-format 'custom' (formatter='pxfwritable_import');
-select * from hawq_types order by t1;
-
--- 2. Hive table stored as text
-\! ${HIVE_ROOT}/bin/hive -e "create table reg_txt (s1 string,s2 string,n1 int, d1 double)row format delimited fields terminated by ','" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "load data local inpath '@abs_srcdir@/data/pxf/hive_small_data.txt' into table reg_txt" 2>/dev/null
-
-CREATE EXTERNAL TABLE hv_txt(
-t1    text,
-t2    text,  
-num1  integer, 
-dub1  double precision)
-LOCATION ('pxf://@hostname@:50070/reg_txt?PROFILE=Hive')
-format 'custom' (formatter='pxfwritable_import');
-select * from hv_txt order by t1;
-
--- 3. Hive table stored as sequence
-\! ${HIVE_ROOT}/bin/hive -e "create table reg_seq (t0 string, t1 string, num1 int, d1 double) row format delimited fields terminated by ',' STORED AS SEQUENCEFILE" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "insert into table reg_seq select * from reg_txt" 2>/dev/null
-
-CREATE EXTERNAL TABLE hv_seq(
-t1    text,
-t2    text,  
-num1  integer, 
-dub1  double precision)
-LOCATION ('pxf://@hostname@:50070/reg_seq?PROFILE=Hive')
-format 'custom' (formatter='pxfwritable_import');
-select * from hv_seq order by t1;
-
--- 4. Hive table stored as rcfile
-\! ${HIVE_ROOT}/bin/hive -e "create table reg_rc (t0 string, t1 string, num1 int, d1 double) STORED AS RCFILE" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "insert into table reg_rc select * from reg_txt" 2>/dev/null
-
-CREATE EXTERNAL TABLE hv_rc(
-t1    text,
-t2    text,  
-num1  integer, 
-dub1  double precision)
-LOCATION ('pxf://@hostname@:50070/reg_rc?PROFILE=Hive')
-format 'custom' (formatter='pxfwritable_import');
-select * from hv_rc order by t1;
-
--- 5. Hive table stored as orc
-\! ${HIVE_ROOT}/bin/hive -e "create table reg_orc (t0 string, t1 string, num1 int, d1 double) STORED AS ORC" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "insert into table reg_orc select * from reg_txt" 2>/dev/null
-
-CREATE EXTERNAL TABLE hv_orc(
-t1    text,
-t2    text,  
-num1  integer, 
-dub1  double precision)
-LOCATION ('pxf://@hostname@:50070/reg_orc?PROFILE=Hive')
-format 'custom' (formatter='pxfwritable_import');
-select * from hv_orc order by t1;
-
--- 6. Hive table stored in several partitions, where each partition is stored in a different format
-\! ${HIVE_ROOT}/bin/hive -e "create external table reg_heterogen (s1 string,s2 string,n1 int, d1 double) partitioned by (fmt string)  row format delimited fields terminated by ','" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "alter table reg_heterogen add partition (fmt = 'txt') location 'hdfs:/hive/warehouse/reg_txt'" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "alter table reg_heterogen add partition (fmt = 'rc') location 'hdfs:/hive/warehouse/reg_rc'" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "alter table reg_heterogen add partition (fmt = 'seq') location 'hdfs:/hive/warehouse/reg_seq'" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "alter table reg_heterogen add partition (fmt = 'orc') location 'hdfs:/hive/warehouse/reg_orc'" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "alter table reg_heterogen partition (fmt='rc') set fileformat RCFILE" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "alter table reg_heterogen partition (fmt='seq') set fileformat SEQUENCEFILE" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "alter table reg_heterogen partition (fmt='orc') set fileformat ORC" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "show partitions reg_heterogen" 2>/dev/null
-
-CREATE EXTERNAL TABLE hv_heterogen(
-t1    text,
-t2    text,  
-num1  integer, 
-dub1  double precision,
-t3 text)
-LOCATION ('pxf://@hostname@:50070/reg_heterogen?PROFILE=Hive')
-format 'custom' (formatter='pxfwritable_import');
-select * from hv_heterogen order by t3, t1;
-
--- Test analyze for Hive table.
-ANALYZE hv_heterogen;
-select relpages, reltuples from pg_class where relname = 'hv_heterogen';
-
--- 7. Hive table with collection types (non-primitive types)
-\! ${HIVE_ROOT}/bin/hive -e "CREATE TABLE reg_collections ( s1 STRING, f1 FLOAT, a1 ARRAY<STRING> , m1 MAP<STRING,  FLOAT > , sr1 STRUCT<street:STRING,  city:STRING,  state:STRING,  zip:INT > )  ROW FORMAT DELIMITED  FIELDS TERMINATED BY '\001' COLLECTION ITEMS TERMINATED BY '\002' MAP KEYS TERMINATED BY '\003'  LINES TERMINATED BY '\n'  STORED AS TEXTFILE" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "load data local inpath '@abs_srcdir@/data/pxf/hive_collections.txt' into table reg_collections" 2>/dev/null
-
-CREATE EXTERNAL TABLE hv_collections(
-t1    text,   
-f1    real,
-t2    text, 
-t3    text, 
-t4    text,
-t5    text, 
-f2    real,
-t6    text,
-f3    real,
-t7    text,
-t8    text,
-t9    text,
-num1  integer)
-LOCATION ('pxf://@hostname@:50070/reg_collections?PROFILE=Hive')
-format 'custom' (formatter='pxfwritable_import');
-select * from hv_collections order by t1;
-
--- 8. View - negative test
-\! ${HIVE_ROOT}/bin/hive -e "create view reg_txt_view as select s1 from reg_txt" 2>/dev/null
-
-CREATE EXTERNAL TABLE hv_view(
-t1    text)
-LOCATION ('pxf://@hostname@:50070/reg_txt_view?PROFILE=Hive')
-format 'custom' (formatter='pxfwritable_import');
-select * from hv_view order by t1;
-
--- 9. Decimal as a partition column in the Hive table
-\! ${HIVE_ROOT}/bin/hive -e "create table part_dec (s1 string,s2 string,n1 int, d1 double) partitioned by (dec decimal)  row format delimited fields terminated by ','" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "load data local inpath '@abs_srcdir@/data/pxf/hive_small_data.txt' into table part_dec partition (dec = 10.1111111111111)" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "load data local inpath '@abs_srcdir@/data/pxf/hive_small_data.txt' into table part_dec partition (dec = 10.2222222222222)" 2>/dev/null
-
-CREATE EXTERNAL TABLE hv_part_dec(
-t1    text,
-t2    text,  
-num1  integer, 
-dub1  double precision,
-dec1  numeric)
-LOCATION ('pxf://@hostname@:50070/part_dec?PROFILE=Hive')
-format 'custom' (formatter='pxfwritable_import');
-select * from hv_part_dec order by dec1, t1;
-select * from hv_part_dec where dec1 = 10.2222222222222 order by t1;
-
--- 10. Unsupported type - negative test
-\! ${HIVE_ROOT}/bin/hive -e "create table un_supported_tbl (s1 string,s2 string,n1 tinyint, n2 int)row format delimited fields terminated by ','" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "load data local inpath '@abs_srcdir@/data/pxf/hive_small_data.txt' into table un_supported_tbl" 2>/dev/null
-
-CREATE EXTERNAL TABLE hv_un_supported_tbl(
-t1    text,
-t2    text,  
-num1  integer, 
-num2  integer)
-LOCATION ('pxf://@hostname@:50070/un_supported_tbl?PROFILE=Hive')
-format 'custom' (formatter='pxfwritable_import');
-select * from hv_un_supported_tbl;
-
--- 11.  Create external table with Profile option
-CREATE EXTERNAL TABLE tbl_with_profile(
-t1    text,
-t2    text,  
-num1  integer, 
-dub1  double precision)
-LOCATION ('pxf://@hostname@:50070/reg_seq?PROFILE=HIVE')
-format 'custom' (formatter='pxfwritable_import');
-select * from tbl_with_profile order by t1;
-
--- 12. Test analyze for Hive table with profile without analyzer - negative test
-CREATE EXTERNAL TABLE hv_heterogen_profile_wo_analyzer(
-t1    text,
-t2    text,
-num1  integer,
-dub1  double precision,
-t3 text)
-LOCATION ('pxf://@hostname@:50070/reg_heterogen?PROFILE=Hive')
-format 'custom' (formatter='pxfwritable_import');
-ANALYZE hv_heterogen_profile_wo_analyzer;
-
--- 13. Index table
-\! ${HIVE_ROOT}/bin/hive -e "create index reg_txt_index on table reg_txt (s1) as 'COMPACT' with deferred rebuild" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "alter index reg_txt_index on reg_txt rebuild" 2>/dev/null
-CREATE EXTERNAL TABLE hv_index(
-t1    text,
-t2    text,
-n1 bigint)
-LOCATION ('pxf://@hostname@:50070/default__reg_txt_reg_txt_index__?PROFILE=Hive')
-FORMAT 'CUSTOM' (formatter='pxfwritable_import');
-SELECT t1, n1 FROM hv_index ORDER BY t1;
-
--- 14. Create external table with wrong column definitions - negative test
-CREATE EXTERNAL TABLE tbl_with_wrong_col_def(
-t1    text,
-t2    text,
-wrong1  double precision,
-wrong2  smallint)
-LOCATION ('pxf://@hostname@:50070/reg_seq?PROFILE=HIVE')
-format 'custom' (formatter='pxfwritable_import');
-select * from tbl_with_wrong_col_def order by t1;
-
--- 15. Clean after Hive
-drop external table hv_index;
-drop external table hv_view;
-drop external table hv_collections;
-drop external table hv_heterogen;
-drop external table hv_orc;
-drop external table hv_rc;
-drop external table hv_seq;
-drop external table hv_txt;
-drop external table hawq_types;
-drop external table hv_part_dec;
-drop external table hv_un_supported_tbl;
-drop external table tbl_with_profile;
-drop external table hv_heterogen_profile_wo_analyzer;
-drop external table tbl_with_wrong_col_def;
-
-\! ${HIVE_ROOT}/bin/hive -e "drop index reg_index on reg_txt" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "drop table default__reg_txt_reg_txt_index__" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "drop view reg_txt_view" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "drop table reg_collections" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "drop table reg_heterogen" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "drop table reg_orc" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "drop table reg_rc" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "drop table reg_seq" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "drop table reg_txt" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "drop table hive_types" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "drop table part_dec" 2>/dev/null
-\! ${HIVE_ROOT}/bin/hive -e "drop table un_supported_tbl" 2>/dev/null
\ No newline at end of file