You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by kg...@apache.org on 2018/04/14 06:42:02 UTC
[18/19] hive git commit: HIVE-18862: qfiles: prepare .q files for
using datasets (Laszlo Bodor via Zoltan Haindrich)
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/accumulo-handler/src/test/queries/positive/accumulo_custom_key.q
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/test/queries/positive/accumulo_custom_key.q b/accumulo-handler/src/test/queries/positive/accumulo_custom_key.q
index 6684fd3..ca7e6c7 100644
--- a/accumulo-handler/src/test/queries/positive/accumulo_custom_key.q
+++ b/accumulo-handler/src/test/queries/positive/accumulo_custom_key.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
CREATE TABLE accumulo_ck_1(key struct<col1:string,col2:string,col3:string>, value string)
STORED BY 'org.apache.hadoop.hive.accumulo.AccumuloStorageHandler'
WITH SERDEPROPERTIES (
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/accumulo-handler/src/test/queries/positive/accumulo_custom_key2.q
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/test/queries/positive/accumulo_custom_key2.q b/accumulo-handler/src/test/queries/positive/accumulo_custom_key2.q
index 038633f..615e5f8 100644
--- a/accumulo-handler/src/test/queries/positive/accumulo_custom_key2.q
+++ b/accumulo-handler/src/test/queries/positive/accumulo_custom_key2.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
CREATE TABLE accumulo_ck_3(key struct<col1:string,col2:string,col3:string>, value string)
STORED BY 'org.apache.hadoop.hive.accumulo.AccumuloStorageHandler'
WITH SERDEPROPERTIES (
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/accumulo-handler/src/test/queries/positive/accumulo_joins.q
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/test/queries/positive/accumulo_joins.q b/accumulo-handler/src/test/queries/positive/accumulo_joins.q
index b72ec6b..a6b09ab 100644
--- a/accumulo-handler/src/test/queries/positive/accumulo_joins.q
+++ b/accumulo-handler/src/test/queries/positive/accumulo_joins.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
DROP TABLE users;
DROP TABLE states;
DROP TABLE countries;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/accumulo-handler/src/test/queries/positive/accumulo_predicate_pushdown.q
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/test/queries/positive/accumulo_predicate_pushdown.q b/accumulo-handler/src/test/queries/positive/accumulo_predicate_pushdown.q
index 0f064af..4cb93b5 100644
--- a/accumulo-handler/src/test/queries/positive/accumulo_predicate_pushdown.q
+++ b/accumulo-handler/src/test/queries/positive/accumulo_predicate_pushdown.q
@@ -1,3 +1,5 @@
+--! qt:dataset:src
+--! qt:dataset:part
CREATE TABLE accumulo_pushdown(key string, value string)
STORED BY 'org.apache.hadoop.hive.accumulo.AccumuloStorageHandler'
WITH SERDEPROPERTIES ("accumulo.columns.mapping" = ":rowid,cf:string");
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/accumulo-handler/src/test/queries/positive/accumulo_queries.q
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/test/queries/positive/accumulo_queries.q b/accumulo-handler/src/test/queries/positive/accumulo_queries.q
index 0aceaa0..bc93d8a 100644
--- a/accumulo-handler/src/test/queries/positive/accumulo_queries.q
+++ b/accumulo-handler/src/test/queries/positive/accumulo_queries.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
-- remove these; after HIVE-18802 is fixed
set hive.optimize.index.filter=false;
set hive.optimize.ppd=false;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/accumulo-handler/src/test/queries/positive/accumulo_single_sourced_multi_insert.q
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/test/queries/positive/accumulo_single_sourced_multi_insert.q b/accumulo-handler/src/test/queries/positive/accumulo_single_sourced_multi_insert.q
index f904d3f..c12f962 100644
--- a/accumulo-handler/src/test/queries/positive/accumulo_single_sourced_multi_insert.q
+++ b/accumulo-handler/src/test/queries/positive/accumulo_single_sourced_multi_insert.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
-- HIVE-4375 Single sourced multi insert consists of native and non-native table mixed throws NPE
CREATE TABLE src_x1(key string, value string);
CREATE TABLE src_x2(key string, value string)
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientnegative/case_with_row_sequence.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientnegative/case_with_row_sequence.q b/contrib/src/test/queries/clientnegative/case_with_row_sequence.q
index 910ffda..9922092 100644
--- a/contrib/src/test/queries/clientnegative/case_with_row_sequence.q
+++ b/contrib/src/test/queries/clientnegative/case_with_row_sequence.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
set hive.exec.submitviachild=true;
set hive.exec.submit.local.task.via.child=true;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientnegative/invalid_row_sequence.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientnegative/invalid_row_sequence.q b/contrib/src/test/queries/clientnegative/invalid_row_sequence.q
index 1de9d9e..b797fef 100644
--- a/contrib/src/test/queries/clientnegative/invalid_row_sequence.q
+++ b/contrib/src/test/queries/clientnegative/invalid_row_sequence.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
-- Verify that a stateful UDF cannot be used outside of the SELECT list
drop temporary function row_sequence;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientnegative/udtf_explode2.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientnegative/udtf_explode2.q b/contrib/src/test/queries/clientnegative/udtf_explode2.q
index 29217eb..f202d77 100644
--- a/contrib/src/test/queries/clientnegative/udtf_explode2.q
+++ b/contrib/src/test/queries/clientnegative/udtf_explode2.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION explode2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFExplode2';
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/dboutput.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/dboutput.q b/contrib/src/test/queries/clientpositive/dboutput.q
index 2b20738..4798cb2 100644
--- a/contrib/src/test/queries/clientpositive/dboutput.q
+++ b/contrib/src/test/queries/clientpositive/dboutput.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
set hive.mapred.mode=nonstrict;
ADD JAR ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/fileformat_base64.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/fileformat_base64.q b/contrib/src/test/queries/clientpositive/fileformat_base64.q
index 20bbfa8..f52669f 100644
--- a/contrib/src/test/queries/clientpositive/fileformat_base64.q
+++ b/contrib/src/test/queries/clientpositive/fileformat_base64.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
DROP TABLE base64_test;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/java_mr_example.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/java_mr_example.q b/contrib/src/test/queries/clientpositive/java_mr_example.q
index 06911dc..3b82c18 100644
--- a/contrib/src/test/queries/clientpositive/java_mr_example.q
+++ b/contrib/src/test/queries/clientpositive/java_mr_example.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
FROM (
FROM src
MAP value, key
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/lateral_view_explode2.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/lateral_view_explode2.q b/contrib/src/test/queries/clientpositive/lateral_view_explode2.q
index 210946e..edfff44 100644
--- a/contrib/src/test/queries/clientpositive/lateral_view_explode2.q
+++ b/contrib/src/test/queries/clientpositive/lateral_view_explode2.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION explode2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFExplode2';
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/serde_typedbytes.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/serde_typedbytes.q b/contrib/src/test/queries/clientpositive/serde_typedbytes.q
index d0765fd..29e3ba1 100644
--- a/contrib/src/test/queries/clientpositive/serde_typedbytes.q
+++ b/contrib/src/test/queries/clientpositive/serde_typedbytes.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
drop table dest1;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/serde_typedbytes2.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/serde_typedbytes2.q b/contrib/src/test/queries/clientpositive/serde_typedbytes2.q
index a709558..069cfd4 100644
--- a/contrib/src/test/queries/clientpositive/serde_typedbytes2.q
+++ b/contrib/src/test/queries/clientpositive/serde_typedbytes2.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
drop table dest1;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/serde_typedbytes3.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/serde_typedbytes3.q b/contrib/src/test/queries/clientpositive/serde_typedbytes3.q
index 492c576..68917e6 100644
--- a/contrib/src/test/queries/clientpositive/serde_typedbytes3.q
+++ b/contrib/src/test/queries/clientpositive/serde_typedbytes3.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
drop table dest1;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/serde_typedbytes4.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/serde_typedbytes4.q b/contrib/src/test/queries/clientpositive/serde_typedbytes4.q
index bb1f257..49c9d7b 100644
--- a/contrib/src/test/queries/clientpositive/serde_typedbytes4.q
+++ b/contrib/src/test/queries/clientpositive/serde_typedbytes4.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
set hive.mapred.mode=nonstrict;
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/serde_typedbytes5.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/serde_typedbytes5.q b/contrib/src/test/queries/clientpositive/serde_typedbytes5.q
index f658c46..5978be1 100644
--- a/contrib/src/test/queries/clientpositive/serde_typedbytes5.q
+++ b/contrib/src/test/queries/clientpositive/serde_typedbytes5.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
drop table dest1;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/serde_typedbytes_null.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/serde_typedbytes_null.q b/contrib/src/test/queries/clientpositive/serde_typedbytes_null.q
index 59b757b..404dc65 100644
--- a/contrib/src/test/queries/clientpositive/serde_typedbytes_null.q
+++ b/contrib/src/test/queries/clientpositive/serde_typedbytes_null.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
DROP TABLE table1;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/udaf_example_avg.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/udaf_example_avg.q b/contrib/src/test/queries/clientpositive/udaf_example_avg.q
index 0f69926..d18b197 100644
--- a/contrib/src/test/queries/clientpositive/udaf_example_avg.q
+++ b/contrib/src/test/queries/clientpositive/udaf_example_avg.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
set hive.mapred.mode=nonstrict;
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q b/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q
index 817cf32..56e7164 100644
--- a/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q
+++ b/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
set hive.mapred.mode=nonstrict;
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/udaf_example_max.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/udaf_example_max.q b/contrib/src/test/queries/clientpositive/udaf_example_max.q
index 7db78a7..2b72791 100644
--- a/contrib/src/test/queries/clientpositive/udaf_example_max.q
+++ b/contrib/src/test/queries/clientpositive/udaf_example_max.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
set hive.mapred.mode=nonstrict;
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/udaf_example_max_n.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/udaf_example_max_n.q b/contrib/src/test/queries/clientpositive/udaf_example_max_n.q
index 4cc2878..f4e5596 100644
--- a/contrib/src/test/queries/clientpositive/udaf_example_max_n.q
+++ b/contrib/src/test/queries/clientpositive/udaf_example_max_n.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
set hive.mapred.mode=nonstrict;
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/udaf_example_min.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/udaf_example_min.q b/contrib/src/test/queries/clientpositive/udaf_example_min.q
index bd94252..32ae4aa 100644
--- a/contrib/src/test/queries/clientpositive/udaf_example_min.q
+++ b/contrib/src/test/queries/clientpositive/udaf_example_min.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
set hive.mapred.mode=nonstrict;
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/udaf_example_min_n.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/udaf_example_min_n.q b/contrib/src/test/queries/clientpositive/udaf_example_min_n.q
index 12bf1ec..ca483eb 100644
--- a/contrib/src/test/queries/clientpositive/udaf_example_min_n.q
+++ b/contrib/src/test/queries/clientpositive/udaf_example_min_n.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
set hive.mapred.mode=nonstrict;
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION example_min_n AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMinN';
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/udf_example_add.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/udf_example_add.q b/contrib/src/test/queries/clientpositive/udf_example_add.q
index fb7b5c9..e099935 100644
--- a/contrib/src/test/queries/clientpositive/udf_example_add.q
+++ b/contrib/src/test/queries/clientpositive/udf_example_add.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION example_add AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleAdd';
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q b/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q
index a8ab751..f097958 100644
--- a/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q
+++ b/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src_thrift
-- Suppress vectorization due to known bug. See HIVE-19110.
set hive.vectorized.execution.enabled=false;
set hive.test.vectorized.execution.enabled.override=disable;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/udf_example_format.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/udf_example_format.q b/contrib/src/test/queries/clientpositive/udf_example_format.q
index edda00b..e42bab7 100644
--- a/contrib/src/test/queries/clientpositive/udf_example_format.q
+++ b/contrib/src/test/queries/clientpositive/udf_example_format.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION example_format AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleFormat';
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/udf_row_sequence.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/udf_row_sequence.q b/contrib/src/test/queries/clientpositive/udf_row_sequence.q
index 44099b1..7a3a386 100644
--- a/contrib/src/test/queries/clientpositive/udf_row_sequence.q
+++ b/contrib/src/test/queries/clientpositive/udf_row_sequence.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
-- The ORDER BY on the outer query will typically have no effect,
-- but there is really no guarantee that the ordering is preserved
-- across various SQL operators.
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/udtf_explode2.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/udtf_explode2.q b/contrib/src/test/queries/clientpositive/udtf_explode2.q
index 2bd1359..8c306cf 100644
--- a/contrib/src/test/queries/clientpositive/udtf_explode2.q
+++ b/contrib/src/test/queries/clientpositive/udtf_explode2.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION explode2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFExplode2';
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/udtf_output_on_close.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/udtf_output_on_close.q b/contrib/src/test/queries/clientpositive/udtf_output_on_close.q
index 87aec5e..08762aa 100644
--- a/contrib/src/test/queries/clientpositive/udtf_output_on_close.q
+++ b/contrib/src/test/queries/clientpositive/udtf_output_on_close.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
CREATE TEMPORARY FUNCTION udtfCount2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFCount2';
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/contrib/src/test/queries/clientpositive/url_hook.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/url_hook.q b/contrib/src/test/queries/clientpositive/url_hook.q
index 1d56408..512e579 100644
--- a/contrib/src/test/queries/clientpositive/url_hook.q
+++ b/contrib/src/test/queries/clientpositive/url_hook.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
add jar ${system:maven.local.repository}/org/apache/hive/hive-contrib/${system:hive.version}/hive-contrib-${system:hive.version}.jar;
SHOW TABLES 'src';
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/files/datasets/alltypesorc/load.hive.sql
----------------------------------------------------------------------
diff --git a/data/files/datasets/alltypesorc/load.hive.sql b/data/files/datasets/alltypesorc/load.hive.sql
new file mode 100644
index 0000000..87f9d56
--- /dev/null
+++ b/data/files/datasets/alltypesorc/load.hive.sql
@@ -0,0 +1,21 @@
+CREATE TABLE alltypesorc(
+ ctinyint TINYINT,
+ csmallint SMALLINT,
+ cint INT,
+ cbigint BIGINT,
+ cfloat FLOAT,
+ cdouble DOUBLE,
+ cstring1 STRING,
+ cstring2 STRING,
+ ctimestamp1 TIMESTAMP,
+ ctimestamp2 TIMESTAMP,
+ cboolean1 BOOLEAN,
+ cboolean2 BOOLEAN)
+ STORED AS ORC;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/alltypesorc"
+OVERWRITE INTO TABLE alltypesorc;
+
+ANALYZE TABLE alltypesorc COMPUTE STATISTICS;
+
+ANALYZE TABLE alltypesorc COMPUTE STATISTICS FOR COLUMNS ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/files/datasets/alltypesparquet/load.hive.sql
----------------------------------------------------------------------
diff --git a/data/files/datasets/alltypesparquet/load.hive.sql b/data/files/datasets/alltypesparquet/load.hive.sql
new file mode 100644
index 0000000..45d3616
--- /dev/null
+++ b/data/files/datasets/alltypesparquet/load.hive.sql
@@ -0,0 +1,39 @@
+CREATE TABLE alltypesorc_to_parquet(
+ ctinyint TINYINT,
+ csmallint SMALLINT,
+ cint INT,
+ cbigint BIGINT,
+ cfloat FLOAT,
+ cdouble DOUBLE,
+ cstring1 STRING,
+ cstring2 STRING,
+ ctimestamp1 TIMESTAMP,
+ ctimestamp2 TIMESTAMP,
+ cboolean1 BOOLEAN,
+ cboolean2 BOOLEAN)
+ STORED AS ORC;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/alltypesorc"
+OVERWRITE INTO TABLE alltypesorc_to_parquet;
+
+CREATE TABLE alltypesparquet(
+ ctinyint TINYINT,
+ csmallint SMALLINT,
+ cint INT,
+ cbigint BIGINT,
+ cfloat FLOAT,
+ cdouble DOUBLE,
+ cstring1 STRING,
+ cstring2 STRING,
+ ctimestamp1 TIMESTAMP,
+ ctimestamp2 TIMESTAMP,
+ cboolean1 BOOLEAN,
+ cboolean2 BOOLEAN)
+ STORED AS PARQUET;
+
+INSERT OVERWRITE TABLE alltypesparquet SELECT * FROM alltypesorc_to_parquet;
+
+ANALYZE TABLE alltypesparquet COMPUTE STATISTICS;
+
+ANALYZE TABLE alltypesparquet COMPUTE STATISTICS FOR COLUMNS ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2;
+DROP TABLE alltypesorc_to_parquet;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/files/datasets/cbo_t1/load.hive.sql
----------------------------------------------------------------------
diff --git a/data/files/datasets/cbo_t1/load.hive.sql b/data/files/datasets/cbo_t1/load.hive.sql
new file mode 100644
index 0000000..257f0bd
--- /dev/null
+++ b/data/files/datasets/cbo_t1/load.hive.sql
@@ -0,0 +1,8 @@
+set hive.cbo.enable=true;
+
+create table cbo_t1(key string, value string, c_int int, c_float float, c_boolean boolean) partitioned by (dt string) row format delimited fields terminated by ',' STORED AS TEXTFILE;
+
+load data local inpath '${hiveconf:test.data.dir}/cbo_t1.txt' into table cbo_t1 partition (dt='2014');
+
+analyze table cbo_t1 partition (dt) compute statistics;
+analyze table cbo_t1 compute statistics for columns key, value, c_int, c_float, c_boolean;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/files/datasets/cbo_t2/load.hive.sql
----------------------------------------------------------------------
diff --git a/data/files/datasets/cbo_t2/load.hive.sql b/data/files/datasets/cbo_t2/load.hive.sql
new file mode 100644
index 0000000..293bae2
--- /dev/null
+++ b/data/files/datasets/cbo_t2/load.hive.sql
@@ -0,0 +1,8 @@
+set hive.cbo.enable=true;
+
+create table cbo_t2(key string, value string, c_int int, c_float float, c_boolean boolean) partitioned by (dt string) row format delimited fields terminated by ',' STORED AS TEXTFILE;
+
+load data local inpath '${hiveconf:test.data.dir}/cbo_t2.txt' into table cbo_t2 partition (dt='2014');
+
+analyze table cbo_t2 partition (dt) compute statistics;
+analyze table cbo_t2 compute statistics for columns key, value, c_int, c_float, c_boolean;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/files/datasets/cbo_t3/load.hive.sql
----------------------------------------------------------------------
diff --git a/data/files/datasets/cbo_t3/load.hive.sql b/data/files/datasets/cbo_t3/load.hive.sql
new file mode 100644
index 0000000..6c7c3e0
--- /dev/null
+++ b/data/files/datasets/cbo_t3/load.hive.sql
@@ -0,0 +1,8 @@
+set hive.cbo.enable=true;
+
+create table cbo_t3(key string, value string, c_int int, c_float float, c_boolean boolean) row format delimited fields terminated by ',' STORED AS TEXTFILE;
+
+load data local inpath '${hiveconf:test.data.dir}/cbo_t3.txt' into table cbo_t3;
+
+analyze table cbo_t3 compute statistics;
+analyze table cbo_t3 compute statistics for columns key, value, c_int, c_float, c_boolean;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/files/datasets/lineitem/load.hive.sql
----------------------------------------------------------------------
diff --git a/data/files/datasets/lineitem/load.hive.sql b/data/files/datasets/lineitem/load.hive.sql
new file mode 100644
index 0000000..ab2f5fb
--- /dev/null
+++ b/data/files/datasets/lineitem/load.hive.sql
@@ -0,0 +1,23 @@
+CREATE TABLE lineitem (L_ORDERKEY INT,
+ L_PARTKEY INT,
+ L_SUPPKEY INT,
+ L_LINENUMBER INT,
+ L_QUANTITY DOUBLE,
+ L_EXTENDEDPRICE DOUBLE,
+ L_DISCOUNT DOUBLE,
+ L_TAX DOUBLE,
+ L_RETURNFLAG STRING,
+ L_LINESTATUS STRING,
+ l_shipdate STRING,
+ L_COMMITDATE STRING,
+ L_RECEIPTDATE STRING,
+ L_SHIPINSTRUCT STRING,
+ L_SHIPMODE STRING,
+ L_COMMENT STRING)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY '|';
+
+LOAD DATA LOCAL INPATH '${hiveconf:test.data.dir}/lineitem.txt' OVERWRITE INTO TABLE lineitem;
+
+analyze table lineitem compute statistics;
+analyze table lineitem compute statistics for columns;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/files/datasets/part/load.hive.sql
----------------------------------------------------------------------
diff --git a/data/files/datasets/part/load.hive.sql b/data/files/datasets/part/load.hive.sql
new file mode 100644
index 0000000..c24b505
--- /dev/null
+++ b/data/files/datasets/part/load.hive.sql
@@ -0,0 +1,16 @@
+CREATE TABLE part(
+ p_partkey INT,
+ p_name STRING,
+ p_mfgr STRING,
+ p_brand STRING,
+ p_type STRING,
+ p_size INT,
+ p_container STRING,
+ p_retailprice DOUBLE,
+ p_comment STRING
+);
+
+LOAD DATA LOCAL INPATH '${hiveconf:test.data.dir}/part_tiny.txt' overwrite into table part;
+
+analyze table part compute statistics;
+analyze table part compute statistics for columns;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/files/datasets/src/load.hive.sql
----------------------------------------------------------------------
diff --git a/data/files/datasets/src/load.hive.sql b/data/files/datasets/src/load.hive.sql
new file mode 100644
index 0000000..eca7e07
--- /dev/null
+++ b/data/files/datasets/src/load.hive.sql
@@ -0,0 +1,7 @@
+CREATE TABLE src (key STRING COMMENT 'default', value STRING COMMENT 'default') STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt" INTO TABLE src;
+
+ANALYZE TABLE src COMPUTE STATISTICS;
+
+ANALYZE TABLE src COMPUTE STATISTICS FOR COLUMNS key,value;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/files/datasets/src1/load.hive.sql
----------------------------------------------------------------------
diff --git a/data/files/datasets/src1/load.hive.sql b/data/files/datasets/src1/load.hive.sql
new file mode 100644
index 0000000..b64d45f
--- /dev/null
+++ b/data/files/datasets/src1/load.hive.sql
@@ -0,0 +1,7 @@
+CREATE TABLE src1 (key STRING COMMENT 'default', value STRING COMMENT 'default') STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv3.txt" INTO TABLE src1;
+
+ANALYZE TABLE src1 COMPUTE STATISTICS;
+
+ANALYZE TABLE src1 COMPUTE STATISTICS FOR COLUMNS key,value;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/files/datasets/src_cbo/load.hive.sql
----------------------------------------------------------------------
diff --git a/data/files/datasets/src_cbo/load.hive.sql b/data/files/datasets/src_cbo/load.hive.sql
new file mode 100644
index 0000000..570c316
--- /dev/null
+++ b/data/files/datasets/src_cbo/load.hive.sql
@@ -0,0 +1,8 @@
+set hive.cbo.enable=true;
+
+CREATE TABLE src_cbo (key STRING, value STRING) STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt" INTO TABLE src_cbo;
+
+analyze table src_cbo compute statistics;
+analyze table src_cbo compute statistics for columns;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/files/datasets/src_hbase/load.hive.sql
----------------------------------------------------------------------
diff --git a/data/files/datasets/src_hbase/load.hive.sql b/data/files/datasets/src_hbase/load.hive.sql
new file mode 100644
index 0000000..5c7b5b7
--- /dev/null
+++ b/data/files/datasets/src_hbase/load.hive.sql
@@ -0,0 +1,14 @@
+CREATE TABLE src_hbase_tmp (key STRING COMMENT 'default', value STRING COMMENT 'default') STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt" INTO TABLE src_hbase_tmp;
+
+ANALYZE TABLE src_hbase_tmp COMPUTE STATISTICS;
+
+ANALYZE TABLE src_hbase_tmp COMPUTE STATISTICS FOR COLUMNS key,value;
+
+CREATE TABLE src_hbase (key INT, value STRING)
+STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+WITH SERDEPROPERTIES ('hbase.columns.mapping' = ':key,cf:val')
+TBLPROPERTIES ('hbase.table.name' = 'src_hbase');
+
+INSERT OVERWRITE TABLE src_hbase SELECT * FROM src_hbase_tmp;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/files/datasets/src_json/load.hive.sql
----------------------------------------------------------------------
diff --git a/data/files/datasets/src_json/load.hive.sql b/data/files/datasets/src_json/load.hive.sql
new file mode 100644
index 0000000..13e0b96
--- /dev/null
+++ b/data/files/datasets/src_json/load.hive.sql
@@ -0,0 +1,7 @@
+CREATE TABLE src_json (json STRING COMMENT 'default') STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/json.txt" INTO TABLE src_json;
+
+ANALYZE TABLE src_json COMPUTE STATISTICS;
+
+ANALYZE TABLE src_json COMPUTE STATISTICS FOR COLUMNS json;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/files/datasets/src_sequencefile/load.hive.sql
----------------------------------------------------------------------
diff --git a/data/files/datasets/src_sequencefile/load.hive.sql b/data/files/datasets/src_sequencefile/load.hive.sql
new file mode 100644
index 0000000..6f10562
--- /dev/null
+++ b/data/files/datasets/src_sequencefile/load.hive.sql
@@ -0,0 +1,7 @@
+CREATE TABLE src_sequencefile (key STRING COMMENT 'default', value STRING COMMENT 'default') STORED AS SEQUENCEFILE;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.seq" INTO TABLE src_sequencefile;
+
+ANALYZE TABLE src_sequencefile COMPUTE STATISTICS;
+
+ANALYZE TABLE src_sequencefile COMPUTE STATISTICS FOR COLUMNS key,value;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/files/datasets/src_thrift/load.hive.sql
----------------------------------------------------------------------
diff --git a/data/files/datasets/src_thrift/load.hive.sql b/data/files/datasets/src_thrift/load.hive.sql
new file mode 100644
index 0000000..1e1f1b5
--- /dev/null
+++ b/data/files/datasets/src_thrift/load.hive.sql
@@ -0,0 +1,10 @@
+CREATE TABLE src_thrift
+ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer'
+WITH SERDEPROPERTIES (
+ 'serialization.class' = 'org.apache.hadoop.hive.serde2.thrift.test.Complex',
+ 'serialization.format' = 'org.apache.thrift.protocol.TBinaryProtocol')
+STORED AS SEQUENCEFILE;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/complex.seq" INTO TABLE src_thrift;
+
+ANALYZE TABLE src_thrift COMPUTE STATISTICS;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/files/datasets/srcbucket/load.hive.sql
----------------------------------------------------------------------
diff --git a/data/files/datasets/srcbucket/load.hive.sql b/data/files/datasets/srcbucket/load.hive.sql
new file mode 100644
index 0000000..4008115
--- /dev/null
+++ b/data/files/datasets/srcbucket/load.hive.sql
@@ -0,0 +1,13 @@
+CREATE TABLE srcbucket_tmp (key INT, value STRING) STORED AS TEXTFILE;
+CREATE TABLE srcbucket (key INT, value STRING)
+CLUSTERED BY (key) INTO 2 BUCKETS
+STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/bucketed_files/000000_0" INTO TABLE srcbucket_tmp;
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/bucketed_files/000001_0" INTO TABLE srcbucket_tmp;
+INSERT INTO srcbucket SELECT * FROM srcbucket_tmp;
+DROP TABLE srcbucket_tmp;
+
+ANALYZE TABLE srcbucket COMPUTE STATISTICS;
+
+ANALYZE TABLE srcbucket COMPUTE STATISTICS FOR COLUMNS key,value;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/files/datasets/srcbucket2/load.hive.sql
----------------------------------------------------------------------
diff --git a/data/files/datasets/srcbucket2/load.hive.sql b/data/files/datasets/srcbucket2/load.hive.sql
new file mode 100644
index 0000000..de00b2f
--- /dev/null
+++ b/data/files/datasets/srcbucket2/load.hive.sql
@@ -0,0 +1,15 @@
+CREATE TABLE srcbucket_tmp (key INT, value STRING);
+CREATE TABLE srcbucket2 (key INT, value STRING)
+CLUSTERED BY (key) INTO 4 BUCKETS
+STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/bmj/000000_0" INTO TABLE srcbucket_tmp;
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/bmj/000001_0" INTO TABLE srcbucket_tmp;
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/bmj/000002_0" INTO TABLE srcbucket_tmp;
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/bmj/000003_0" INTO TABLE srcbucket_tmp;
+INSERT INTO srcbucket2 SELECT * FROM srcbucket_tmp;
+DROP TABLE srcbucket_tmp;
+
+ANALYZE TABLE srcbucket2 COMPUTE STATISTICS;
+
+ANALYZE TABLE srcbucket2 COMPUTE STATISTICS FOR COLUMNS key,value;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/files/datasets/srcpart/load.hive.sql
----------------------------------------------------------------------
diff --git a/data/files/datasets/srcpart/load.hive.sql b/data/files/datasets/srcpart/load.hive.sql
new file mode 100644
index 0000000..f94d2a3
--- /dev/null
+++ b/data/files/datasets/srcpart/load.hive.sql
@@ -0,0 +1,19 @@
+CREATE TABLE srcpart (key STRING COMMENT 'default', value STRING COMMENT 'default')
+PARTITIONED BY (ds STRING, hr STRING)
+STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
+OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-08", hr="11");
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
+OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-08", hr="12");
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
+OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-09", hr="11");
+
+LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
+OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-09", hr="12");
+
+ANALYZE TABLE srcpart PARTITION(ds, hr) COMPUTE STATISTICS;
+
+ANALYZE TABLE srcpart PARTITION(ds, hr) COMPUTE STATISTICS FOR COLUMNS key,value;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/scripts/q_test_druid_init.sql
----------------------------------------------------------------------
diff --git a/data/scripts/q_test_druid_init.sql b/data/scripts/q_test_druid_init.sql
index ee025f1..e69de29 100644
--- a/data/scripts/q_test_druid_init.sql
+++ b/data/scripts/q_test_druid_init.sql
@@ -1,29 +0,0 @@
-set hive.stats.dbclass=fs;
---
--- Table alltypesorc
---
-DROP TABLE IF EXISTS alltypesorc;
-CREATE TABLE alltypesorc(
- ctinyint TINYINT,
- csmallint SMALLINT,
- cint INT,
- cbigint BIGINT,
- cfloat FLOAT,
- cdouble DOUBLE,
- cstring1 STRING,
- cstring2 STRING,
- ctimestamp1 TIMESTAMP,
- ctimestamp2 TIMESTAMP,
- cboolean1 BOOLEAN,
- cboolean2 BOOLEAN)
- STORED AS ORC;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/alltypesorc"
-OVERWRITE INTO TABLE alltypesorc;
-
-ANALYZE TABLE alltypesorc COMPUTE STATISTICS;
-
-ANALYZE TABLE alltypesorc COMPUTE STATISTICS FOR COLUMNS ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2;
-
--- Druid Table
-
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/scripts/q_test_init.sql
----------------------------------------------------------------------
diff --git a/data/scripts/q_test_init.sql b/data/scripts/q_test_init.sql
index 5d36b6f..a269c55 100644
--- a/data/scripts/q_test_init.sql
+++ b/data/scripts/q_test_init.sql
@@ -1,224 +1,4 @@
set hive.stats.dbclass=fs;
---
--- Table src
---
-DROP TABLE IF EXISTS src;
-
-CREATE TABLE src (key STRING COMMENT 'default', value STRING COMMENT 'default') STORED AS TEXTFILE;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt" INTO TABLE src;
-
-ANALYZE TABLE src COMPUTE STATISTICS;
-
-ANALYZE TABLE src COMPUTE STATISTICS FOR COLUMNS key,value;
-
---
--- Table src1
---
-DROP TABLE IF EXISTS src1;
-
-CREATE TABLE src1 (key STRING COMMENT 'default', value STRING COMMENT 'default') STORED AS TEXTFILE;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv3.txt" INTO TABLE src1;
-
-ANALYZE TABLE src1 COMPUTE STATISTICS;
-
-ANALYZE TABLE src1 COMPUTE STATISTICS FOR COLUMNS key,value;
-
---
--- Table src_json
---
-DROP TABLE IF EXISTS src_json;
-
-CREATE TABLE src_json (json STRING COMMENT 'default') STORED AS TEXTFILE;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/json.txt" INTO TABLE src_json;
-
-ANALYZE TABLE src_json COMPUTE STATISTICS;
-
-ANALYZE TABLE src_json COMPUTE STATISTICS FOR COLUMNS json;
-
---
--- Table src_sequencefile
---
-DROP TABLE IF EXISTS src_sequencefile;
-
-CREATE TABLE src_sequencefile (key STRING COMMENT 'default', value STRING COMMENT 'default') STORED AS SEQUENCEFILE;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.seq" INTO TABLE src_sequencefile;
-
-ANALYZE TABLE src_sequencefile COMPUTE STATISTICS;
-
-ANALYZE TABLE src_sequencefile COMPUTE STATISTICS FOR COLUMNS key,value;
-
---
--- Table src_thrift
---
-DROP TABLE IF EXISTS src_thrift;
-
-CREATE TABLE src_thrift
-ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer'
-WITH SERDEPROPERTIES (
- 'serialization.class' = 'org.apache.hadoop.hive.serde2.thrift.test.Complex',
- 'serialization.format' = 'org.apache.thrift.protocol.TBinaryProtocol')
-STORED AS SEQUENCEFILE;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/complex.seq" INTO TABLE src_thrift;
-
-ANALYZE TABLE src_thrift COMPUTE STATISTICS;
-
---
--- Table srcbucket
---
-DROP TABLE IF EXISTS srcbucket_tmp;
-DROP TABLE IF EXISTS srcbucket;
-
-CREATE TABLE srcbucket_tmp (key INT, value STRING) STORED AS TEXTFILE;
-CREATE TABLE srcbucket (key INT, value STRING)
-CLUSTERED BY (key) INTO 2 BUCKETS
-STORED AS TEXTFILE;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/bucketed_files/000000_0" INTO TABLE srcbucket_tmp;
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/bucketed_files/000001_0" INTO TABLE srcbucket_tmp;
-INSERT INTO srcbucket SELECT * FROM srcbucket_tmp;
-DROP TABLE srcbucket_tmp;
-
-ANALYZE TABLE srcbucket COMPUTE STATISTICS;
-
-ANALYZE TABLE srcbucket COMPUTE STATISTICS FOR COLUMNS key,value;
-
---
--- Table srcbucket2
---
-DROP TABLE IF EXISTS srcbucket_tmp;
-DROP TABLE IF EXISTS srcbucket2;
-
-CREATE TABLE srcbucket_tmp (key INT, value STRING);
-CREATE TABLE srcbucket2 (key INT, value STRING)
-CLUSTERED BY (key) INTO 4 BUCKETS
-STORED AS TEXTFILE;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/bmj/000000_0" INTO TABLE srcbucket_tmp;
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/bmj/000001_0" INTO TABLE srcbucket_tmp;
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/bmj/000002_0" INTO TABLE srcbucket_tmp;
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/bmj/000003_0" INTO TABLE srcbucket_tmp;
-INSERT INTO srcbucket2 SELECT * FROM srcbucket_tmp;
-DROP TABLE srcbucket_tmp;
-
-ANALYZE TABLE srcbucket2 COMPUTE STATISTICS;
-
-ANALYZE TABLE srcbucket2 COMPUTE STATISTICS FOR COLUMNS key,value;
-
---
--- Table srcpart
---
-DROP TABLE IF EXISTS srcpart;
-
-CREATE TABLE srcpart (key STRING COMMENT 'default', value STRING COMMENT 'default')
-PARTITIONED BY (ds STRING, hr STRING)
-STORED AS TEXTFILE;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
-OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-08", hr="11");
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
-OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-08", hr="12");
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
-OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-09", hr="11");
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
-OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-09", hr="12");
-
-ANALYZE TABLE srcpart PARTITION(ds, hr) COMPUTE STATISTICS;
-
-ANALYZE TABLE srcpart PARTITION(ds, hr) COMPUTE STATISTICS FOR COLUMNS key,value;
-
---
--- Table alltypesorc
---
-DROP TABLE IF EXISTS alltypesorc;
-CREATE TABLE alltypesorc(
- ctinyint TINYINT,
- csmallint SMALLINT,
- cint INT,
- cbigint BIGINT,
- cfloat FLOAT,
- cdouble DOUBLE,
- cstring1 STRING,
- cstring2 STRING,
- ctimestamp1 TIMESTAMP,
- ctimestamp2 TIMESTAMP,
- cboolean1 BOOLEAN,
- cboolean2 BOOLEAN)
- STORED AS ORC;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/alltypesorc"
-OVERWRITE INTO TABLE alltypesorc;
-
-ANALYZE TABLE alltypesorc COMPUTE STATISTICS;
-
-ANALYZE TABLE alltypesorc COMPUTE STATISTICS FOR COLUMNS ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2;
-
---
--- Table alltypesparquet
---
-DROP TABLE IF EXISTS alltypesparquet;
-CREATE TABLE alltypesparquet(
- ctinyint TINYINT,
- csmallint SMALLINT,
- cint INT,
- cbigint BIGINT,
- cfloat FLOAT,
- cdouble DOUBLE,
- cstring1 STRING,
- cstring2 STRING,
- ctimestamp1 TIMESTAMP,
- ctimestamp2 TIMESTAMP,
- cboolean1 BOOLEAN,
- cboolean2 BOOLEAN)
- STORED AS PARQUET;
-
-INSERT OVERWRITE TABLE alltypesparquet SELECT * FROM alltypesorc;
-
-ANALYZE TABLE alltypesparquet COMPUTE STATISTICS;
-
-ANALYZE TABLE alltypesparquet COMPUTE STATISTICS FOR COLUMNS ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2;
-
-
---
--- Table primitives
---
-DROP TABLE IF EXISTS primitives;
-CREATE TABLE primitives (
- id INT COMMENT 'default',
- bool_col BOOLEAN COMMENT 'default',
- tinyint_col TINYINT COMMENT 'default',
- smallint_col SMALLINT COMMENT 'default',
- int_col INT COMMENT 'default',
- bigint_col BIGINT COMMENT 'default',
- float_col FLOAT COMMENT 'default',
- double_col DOUBLE COMMENT 'default',
- date_string_col STRING COMMENT 'default',
- string_col STRING COMMENT 'default',
- timestamp_col TIMESTAMP COMMENT 'default')
-PARTITIONED BY (year INT COMMENT 'default', month INT COMMENT 'default')
-ROW FORMAT DELIMITED
- FIELDS TERMINATED BY ','
- ESCAPED BY '\\'
-STORED AS TEXTFILE;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/types/primitives/090101.txt"
-OVERWRITE INTO TABLE primitives PARTITION(year=2009, month=1);
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/types/primitives/090201.txt"
-OVERWRITE INTO TABLE primitives PARTITION(year=2009, month=2);
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/types/primitives/090301.txt"
-OVERWRITE INTO TABLE primitives PARTITION(year=2009, month=3);
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/types/primitives/090401.txt"
-OVERWRITE INTO TABLE primitives PARTITION(year=2009, month=4);
--
-- Function qtest_get_java_boolean
@@ -278,78 +58,5 @@ STORED AS
INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat';
-
---
--- CBO tables
---
-
-drop table if exists cbo_t1;
-drop table if exists cbo_t2;
-drop table if exists cbo_t3;
-drop table if exists src_cbo;
-drop table if exists part;
-drop table if exists lineitem;
-
-set hive.cbo.enable=true;
-
-create table cbo_t1(key string, value string, c_int int, c_float float, c_boolean boolean) partitioned by (dt string) row format delimited fields terminated by ',' STORED AS TEXTFILE;
-create table cbo_t2(key string, value string, c_int int, c_float float, c_boolean boolean) partitioned by (dt string) row format delimited fields terminated by ',' STORED AS TEXTFILE;
-create table cbo_t3(key string, value string, c_int int, c_float float, c_boolean boolean) row format delimited fields terminated by ',' STORED AS TEXTFILE;
-
-load data local inpath '${hiveconf:test.data.dir}/cbo_t1.txt' into table cbo_t1 partition (dt='2014');
-load data local inpath '${hiveconf:test.data.dir}/cbo_t2.txt' into table cbo_t2 partition (dt='2014');
-load data local inpath '${hiveconf:test.data.dir}/cbo_t3.txt' into table cbo_t3;
-
-CREATE TABLE part(
- p_partkey INT,
- p_name STRING,
- p_mfgr STRING,
- p_brand STRING,
- p_type STRING,
- p_size INT,
- p_container STRING,
- p_retailprice DOUBLE,
- p_comment STRING
-);
-
-LOAD DATA LOCAL INPATH '${hiveconf:test.data.dir}/part_tiny.txt' overwrite into table part;
-
-CREATE TABLE lineitem (L_ORDERKEY INT,
- L_PARTKEY INT,
- L_SUPPKEY INT,
- L_LINENUMBER INT,
- L_QUANTITY DOUBLE,
- L_EXTENDEDPRICE DOUBLE,
- L_DISCOUNT DOUBLE,
- L_TAX DOUBLE,
- L_RETURNFLAG STRING,
- L_LINESTATUS STRING,
- l_shipdate STRING,
- L_COMMITDATE STRING,
- L_RECEIPTDATE STRING,
- L_SHIPINSTRUCT STRING,
- L_SHIPMODE STRING,
- L_COMMENT STRING)
-ROW FORMAT DELIMITED
-FIELDS TERMINATED BY '|';
-
-LOAD DATA LOCAL INPATH '${hiveconf:test.data.dir}/lineitem.txt' OVERWRITE INTO TABLE lineitem;
-
-create table src_cbo as select * from src;
-
-
-analyze table cbo_t1 partition (dt) compute statistics;
-analyze table cbo_t1 compute statistics for columns key, value, c_int, c_float, c_boolean;
-analyze table cbo_t2 partition (dt) compute statistics;
-analyze table cbo_t2 compute statistics for columns key, value, c_int, c_float, c_boolean;
-analyze table cbo_t3 compute statistics;
-analyze table cbo_t3 compute statistics for columns key, value, c_int, c_float, c_boolean;
-analyze table src_cbo compute statistics;
-analyze table src_cbo compute statistics for columns;
-analyze table part compute statistics;
-analyze table part compute statistics for columns;
-analyze table lineitem compute statistics;
-analyze table lineitem compute statistics for columns;
-
reset;
set hive.stats.dbclass=fs;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/scripts/q_test_init_compare.sql
----------------------------------------------------------------------
diff --git a/data/scripts/q_test_init_compare.sql b/data/scripts/q_test_init_compare.sql
index c554250..e69de29 100644
--- a/data/scripts/q_test_init_compare.sql
+++ b/data/scripts/q_test_init_compare.sql
@@ -1,26 +0,0 @@
-set hive.stats.dbclass=fs;
---
--- Table alltypesorc
---
-DROP TABLE IF EXISTS alltypesorc;
-CREATE TABLE alltypesorc(
- ctinyint TINYINT,
- csmallint SMALLINT,
- cint INT,
- cbigint BIGINT,
- cfloat FLOAT,
- cdouble DOUBLE,
- cstring1 STRING,
- cstring2 STRING,
- ctimestamp1 TIMESTAMP,
- ctimestamp2 TIMESTAMP,
- cboolean1 BOOLEAN,
- cboolean2 BOOLEAN)
- STORED AS ORC;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/alltypesorc"
-OVERWRITE INTO TABLE alltypesorc;
-
-ANALYZE TABLE alltypesorc COMPUTE STATISTICS;
-
-ANALYZE TABLE alltypesorc COMPUTE STATISTICS FOR COLUMNS ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/scripts/q_test_init_contrib.sql
----------------------------------------------------------------------
diff --git a/data/scripts/q_test_init_contrib.sql b/data/scripts/q_test_init_contrib.sql
index b7bb5ab..e69de29 100644
--- a/data/scripts/q_test_init_contrib.sql
+++ b/data/scripts/q_test_init_contrib.sql
@@ -1,29 +0,0 @@
-set hive.stats.dbclass=fs;
---
--- Table src
---
-DROP TABLE IF EXISTS src;
-
-CREATE TABLE src (key STRING COMMENT 'default', value STRING COMMENT 'default') STORED AS TEXTFILE;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt" INTO TABLE src;
-
-ANALYZE TABLE src COMPUTE STATISTICS;
-
-ANALYZE TABLE src COMPUTE STATISTICS FOR COLUMNS key,value;
-
---
--- Table src_thrift
---
-DROP TABLE IF EXISTS src_thrift;
-
-CREATE TABLE src_thrift
-ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer'
-WITH SERDEPROPERTIES (
- 'serialization.class' = 'org.apache.hadoop.hive.serde2.thrift.test.Complex',
- 'serialization.format' = 'org.apache.thrift.protocol.TBinaryProtocol')
-STORED AS SEQUENCEFILE;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/complex.seq" INTO TABLE src_thrift;
-
-ANALYZE TABLE src_thrift COMPUTE STATISTICS;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/scripts/q_test_init_for_minimr.sql
----------------------------------------------------------------------
diff --git a/data/scripts/q_test_init_for_minimr.sql b/data/scripts/q_test_init_for_minimr.sql
index ab2e1d7..e69de29 100644
--- a/data/scripts/q_test_init_for_minimr.sql
+++ b/data/scripts/q_test_init_for_minimr.sql
@@ -1,48 +0,0 @@
---
--- Table src
---
-DROP TABLE IF EXISTS src;
-
-CREATE TABLE src (key STRING COMMENT 'default', value STRING COMMENT 'default') STORED AS TEXTFILE;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt" INTO TABLE src;
-
---
--- Table srcpart
---
-DROP TABLE IF EXISTS srcpart;
-
-CREATE TABLE srcpart (key STRING COMMENT 'default', value STRING COMMENT 'default')
-PARTITIONED BY (ds STRING, hr STRING)
-STORED AS TEXTFILE;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
-OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-08", hr="11");
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
-OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-08", hr="12");
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
-OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-09", hr="11");
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
-OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-09", hr="12");
-
---
--- Table part
---
-drop table if exists part;
-
-CREATE TABLE part(
- p_partkey INT,
- p_name STRING,
- p_mfgr STRING,
- p_brand STRING,
- p_type STRING,
- p_size INT,
- p_container STRING,
- p_retailprice DOUBLE,
- p_comment STRING
-);
-
-LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/scripts/q_test_init_src.sql
----------------------------------------------------------------------
diff --git a/data/scripts/q_test_init_src.sql b/data/scripts/q_test_init_src.sql
index 765d6c7..e69de29 100644
--- a/data/scripts/q_test_init_src.sql
+++ b/data/scripts/q_test_init_src.sql
@@ -1,9 +0,0 @@
-DROP TABLE IF EXISTS src PURGE;
-
-CREATE TABLE src(key STRING COMMENT 'default', value STRING COMMENT 'default') STORED AS TEXTFILE;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt" OVERWRITE INTO TABLE src;
-
-ANALYZE TABLE src COMPUTE STATISTICS;
-
-ANALYZE TABLE src COMPUTE STATISTICS FOR COLUMNS key,value;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/scripts/q_test_init_src_with_stats.sql
----------------------------------------------------------------------
diff --git a/data/scripts/q_test_init_src_with_stats.sql b/data/scripts/q_test_init_src_with_stats.sql
index 765d6c7..e69de29 100644
--- a/data/scripts/q_test_init_src_with_stats.sql
+++ b/data/scripts/q_test_init_src_with_stats.sql
@@ -1,9 +0,0 @@
-DROP TABLE IF EXISTS src PURGE;
-
-CREATE TABLE src(key STRING COMMENT 'default', value STRING COMMENT 'default') STORED AS TEXTFILE;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt" OVERWRITE INTO TABLE src;
-
-ANALYZE TABLE src COMPUTE STATISTICS;
-
-ANALYZE TABLE src COMPUTE STATISTICS FOR COLUMNS key,value;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/data/scripts/q_test_init_tez.sql
----------------------------------------------------------------------
diff --git a/data/scripts/q_test_init_tez.sql b/data/scripts/q_test_init_tez.sql
index 4e6176b..e69de29 100644
--- a/data/scripts/q_test_init_tez.sql
+++ b/data/scripts/q_test_init_tez.sql
@@ -1,78 +0,0 @@
-set hive.stats.dbclass=fs;
-
---
--- Table src
---
-DROP TABLE IF EXISTS src;
-
-CREATE TABLE src(key STRING COMMENT 'default', value STRING COMMENT 'default') STORED AS TEXTFILE;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt" OVERWRITE INTO TABLE src;
-
-ANALYZE TABLE src COMPUTE STATISTICS;
-
-ANALYZE TABLE src COMPUTE STATISTICS FOR COLUMNS key,value;
-
---
--- Table src1
---
-DROP TABLE IF EXISTS src1;
-
-CREATE TABLE src1 (key STRING COMMENT 'default', value STRING COMMENT 'default') STORED AS TEXTFILE;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv3.txt" INTO TABLE src1;
-
-ANALYZE TABLE src1 COMPUTE STATISTICS;
-
-ANALYZE TABLE src1 COMPUTE STATISTICS FOR COLUMNS key,value;
-
---
--- Table srcpart
---
-DROP TABLE IF EXISTS srcpart;
-
-CREATE TABLE srcpart (key STRING COMMENT 'default', value STRING COMMENT 'default')
-PARTITIONED BY (ds STRING, hr STRING)
-STORED AS TEXTFILE;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
-OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-08", hr="11");
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
-OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-08", hr="12");
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
-OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-09", hr="11");
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/kv1.txt"
-OVERWRITE INTO TABLE srcpart PARTITION (ds="2008-04-09", hr="12");
-
-ANALYZE TABLE srcpart PARTITION(ds, hr) COMPUTE STATISTICS;
-
-ANALYZE TABLE srcpart PARTITION(ds, hr) COMPUTE STATISTICS FOR COLUMNS key,value;
-
---
--- Table alltypesorc
---
-DROP TABLE IF EXISTS alltypesorc;
-CREATE TABLE alltypesorc(
- ctinyint TINYINT,
- csmallint SMALLINT,
- cint INT,
- cbigint BIGINT,
- cfloat FLOAT,
- cdouble DOUBLE,
- cstring1 STRING,
- cstring2 STRING,
- ctimestamp1 TIMESTAMP,
- ctimestamp2 TIMESTAMP,
- cboolean1 BOOLEAN,
- cboolean2 BOOLEAN)
- STORED AS ORC;
-
-LOAD DATA LOCAL INPATH "${hiveconf:test.data.dir}/alltypesorc"
-OVERWRITE INTO TABLE alltypesorc;
-
-ANALYZE TABLE alltypesorc COMPUTE STATISTICS;
-
-ANALYZE TABLE alltypesorc COMPUTE STATISTICS FOR COLUMNS ctinyint,csmallint,cint,cbigint,cfloat,cdouble,cstring1,cstring2,ctimestamp1,ctimestamp2,cboolean1,cboolean2;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/negative/generatehfiles_require_family_path.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/negative/generatehfiles_require_family_path.q b/hbase-handler/src/test/queries/negative/generatehfiles_require_family_path.q
index 6844fbc..e6ff587 100644
--- a/hbase-handler/src/test/queries/negative/generatehfiles_require_family_path.q
+++ b/hbase-handler/src/test/queries/negative/generatehfiles_require_family_path.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
-- -*- mode:sql -*-
DROP TABLE IF EXISTS hbase_bulk;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/external_table_ppd.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/external_table_ppd.q b/hbase-handler/src/test/queries/positive/external_table_ppd.q
index fbef4bb..c6f80cb 100644
--- a/hbase-handler/src/test/queries/positive/external_table_ppd.q
+++ b/hbase-handler/src/test/queries/positive/external_table_ppd.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
DROP TABLE t_hbase;
CREATE TABLE t_hbase(key STRING,
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/hbase_binary_map_queries.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_binary_map_queries.q b/hbase-handler/src/test/queries/positive/hbase_binary_map_queries.q
index 255a2c7..7d9e16e 100644
--- a/hbase-handler/src/test/queries/positive/hbase_binary_map_queries.q
+++ b/hbase-handler/src/test/queries/positive/hbase_binary_map_queries.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
DROP TABLE hbase_src;
CREATE TABLE hbase_src(key STRING,
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/hbase_binary_map_queries_prefix.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_binary_map_queries_prefix.q b/hbase-handler/src/test/queries/positive/hbase_binary_map_queries_prefix.q
index 9ff4366..c205210 100644
--- a/hbase-handler/src/test/queries/positive/hbase_binary_map_queries_prefix.q
+++ b/hbase-handler/src/test/queries/positive/hbase_binary_map_queries_prefix.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
DROP TABLE hbase_src;
CREATE TABLE hbase_src(key STRING,
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/hbase_binary_storage_queries.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_binary_storage_queries.q b/hbase-handler/src/test/queries/positive/hbase_binary_storage_queries.q
index b048871..86ead91 100644
--- a/hbase-handler/src/test/queries/positive/hbase_binary_storage_queries.q
+++ b/hbase-handler/src/test/queries/positive/hbase_binary_storage_queries.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
DROP TABLE t_hbase;
CREATE TABLE t_hbase(key STRING,
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/hbase_bulk.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_bulk.q b/hbase-handler/src/test/queries/positive/hbase_bulk.q
index 475aafc..d2e62dc 100644
--- a/hbase-handler/src/test/queries/positive/hbase_bulk.q
+++ b/hbase-handler/src/test/queries/positive/hbase_bulk.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
drop table hbsort;
drop table hbpartition;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/hbase_custom_key.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_custom_key.q b/hbase-handler/src/test/queries/positive/hbase_custom_key.q
index 9dbb2a0..87fbf4a 100644
--- a/hbase-handler/src/test/queries/positive/hbase_custom_key.q
+++ b/hbase-handler/src/test/queries/positive/hbase_custom_key.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
CREATE TABLE hbase_ck_1(key struct<col1:string,col2:string,col3:string>, value string)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES (
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/hbase_custom_key2.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_custom_key2.q b/hbase-handler/src/test/queries/positive/hbase_custom_key2.q
index 9fba4f6..5116475 100644
--- a/hbase-handler/src/test/queries/positive/hbase_custom_key2.q
+++ b/hbase-handler/src/test/queries/positive/hbase_custom_key2.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
CREATE TABLE hbase_ck_4(key struct<col1:string,col2:string,col3:string>, value string)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES (
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/hbase_custom_key3.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_custom_key3.q b/hbase-handler/src/test/queries/positive/hbase_custom_key3.q
index 22d2c9e..488a32f 100644
--- a/hbase-handler/src/test/queries/positive/hbase_custom_key3.q
+++ b/hbase-handler/src/test/queries/positive/hbase_custom_key3.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
CREATE TABLE hbase_ck_5(key struct<col1:string,col2:string,col3:string>, value string)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES (
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/hbase_ddl.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_ddl.q b/hbase-handler/src/test/queries/positive/hbase_ddl.q
index a8bae75..dcec8d6 100644
--- a/hbase-handler/src/test/queries/positive/hbase_ddl.q
+++ b/hbase-handler/src/test/queries/positive/hbase_ddl.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
DROP TABLE hbase_table_1;
CREATE TABLE hbase_table_1(key int comment 'It is a column key', value string comment 'It is the column string value')
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/hbase_handler_bulk.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_handler_bulk.q b/hbase-handler/src/test/queries/positive/hbase_handler_bulk.q
index d02a61f..1eadfce 100644
--- a/hbase-handler/src/test/queries/positive/hbase_handler_bulk.q
+++ b/hbase-handler/src/test/queries/positive/hbase_handler_bulk.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
-- -*- mode:sql -*-
drop table if exists hb_target;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/hbase_handler_snapshot.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_handler_snapshot.q b/hbase-handler/src/test/queries/positive/hbase_handler_snapshot.q
index dd4fe0f..e429071 100644
--- a/hbase-handler/src/test/queries/positive/hbase_handler_snapshot.q
+++ b/hbase-handler/src/test/queries/positive/hbase_handler_snapshot.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src_hbase
set hive.stats.column.autogather=true;
SET hive.hbase.snapshot.name=src_hbase_snapshot;
SET hive.hbase.snapshot.restoredir=/tmp;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/hbase_joins.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_joins.q b/hbase-handler/src/test/queries/positive/hbase_joins.q
index 1616adc..7be9f6a 100644
--- a/hbase-handler/src/test/queries/positive/hbase_joins.q
+++ b/hbase-handler/src/test/queries/positive/hbase_joins.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
DROP TABLE users;
DROP TABLE states;
DROP TABLE countries;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/hbase_ppd_join.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_ppd_join.q b/hbase-handler/src/test/queries/positive/hbase_ppd_join.q
index 2436c19..b7e2a3b 100644
--- a/hbase-handler/src/test/queries/positive/hbase_ppd_join.q
+++ b/hbase-handler/src/test/queries/positive/hbase_ppd_join.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
--create hive hbase table 1
drop table if exists hive1_tbl_data_hbase1;
drop table if exists hive1_tbl_data_hbase2;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/hbase_ppd_key_range.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_ppd_key_range.q b/hbase-handler/src/test/queries/positive/hbase_ppd_key_range.q
index 59e724d..cf1d1d4 100644
--- a/hbase-handler/src/test/queries/positive/hbase_ppd_key_range.q
+++ b/hbase-handler/src/test/queries/positive/hbase_ppd_key_range.q
@@ -1,3 +1,5 @@
+--! qt:dataset:src
+--! qt:dataset:part
CREATE TABLE hbase_pushdown(key string, value string)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string");
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/hbase_pushdown.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_pushdown.q b/hbase-handler/src/test/queries/positive/hbase_pushdown.q
index 0d29c82..8e366af 100644
--- a/hbase-handler/src/test/queries/positive/hbase_pushdown.q
+++ b/hbase-handler/src/test/queries/positive/hbase_pushdown.q
@@ -1,3 +1,5 @@
+--! qt:dataset:src
+--! qt:dataset:part
CREATE TABLE hbase_pushdown(key int, value string)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string");
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/hbase_queries.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_queries.q b/hbase-handler/src/test/queries/positive/hbase_queries.q
index 4604f3e..a4ea0c5 100644
--- a/hbase-handler/src/test/queries/positive/hbase_queries.q
+++ b/hbase-handler/src/test/queries/positive/hbase_queries.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
DROP TABLE hbase_table_1;
CREATE TABLE hbase_table_1(key int comment 'It is a column key', value string comment 'It is the column string value')
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/hbase_scan_params.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_scan_params.q b/hbase-handler/src/test/queries/positive/hbase_scan_params.q
index 14d7d15..522960c 100644
--- a/hbase-handler/src/test/queries/positive/hbase_scan_params.q
+++ b/hbase-handler/src/test/queries/positive/hbase_scan_params.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
CREATE TABLE hbase_pushdown(key int, value string)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string",
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/hbase_single_sourced_multi_insert.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_single_sourced_multi_insert.q b/hbase-handler/src/test/queries/positive/hbase_single_sourced_multi_insert.q
index 96fec0e..bd4672b 100644
--- a/hbase-handler/src/test/queries/positive/hbase_single_sourced_multi_insert.q
+++ b/hbase-handler/src/test/queries/positive/hbase_single_sourced_multi_insert.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
-- HIVE-4375 Single sourced multi insert consists of native and non-native table mixed throws NPE
CREATE TABLE src_x1(key string, value string);
CREATE TABLE src_x2(key string, value string)
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/hbase_timestamp.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_timestamp.q b/hbase-handler/src/test/queries/positive/hbase_timestamp.q
index 6ae2c30..46d7529 100644
--- a/hbase-handler/src/test/queries/positive/hbase_timestamp.q
+++ b/hbase-handler/src/test/queries/positive/hbase_timestamp.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
DROP TABLE hbase_table;
CREATE TABLE hbase_table (key string, value string, `time` timestamp)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/hbase_timestamp_format.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_timestamp_format.q b/hbase-handler/src/test/queries/positive/hbase_timestamp_format.q
index a8d5501..75820e1 100644
--- a/hbase-handler/src/test/queries/positive/hbase_timestamp_format.q
+++ b/hbase-handler/src/test/queries/positive/hbase_timestamp_format.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
create table hbase_str(rowkey string,mytime string,mystr string)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/hbasestats.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbasestats.q b/hbase-handler/src/test/queries/positive/hbasestats.q
index 52e11c9..0185002 100644
--- a/hbase-handler/src/test/queries/positive/hbasestats.q
+++ b/hbase-handler/src/test/queries/positive/hbasestats.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
DROP TABLE users;
CREATE TABLE users(key string, state string, country string, country_id int)
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/hbase-handler/src/test/queries/positive/ppd_key_ranges.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/ppd_key_ranges.q b/hbase-handler/src/test/queries/positive/ppd_key_ranges.q
index 0497d25..8b70c04 100644
--- a/hbase-handler/src/test/queries/positive/ppd_key_ranges.q
+++ b/hbase-handler/src/test/queries/positive/ppd_key_ranges.q
@@ -1,3 +1,4 @@
+--! qt:dataset:src
CREATE TABLE hbase_ppd_keyrange(key int, value string)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key#binary,cf:string");
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java
index d21fcaf..9c9ba18 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java
@@ -43,16 +43,7 @@ public class CoreAccumuloCliDriver extends CliAdapter {
@BeforeClass
public void beforeClass() {
setup = new AccumuloTestSetup();
- }
- @Override
- @AfterClass
- public void shutdown() throws Exception {
- setup.tearDown();
- }
- @Override
- @Before
- public void setUp() {
-
+
MiniClusterType miniMR = cliConfig.getClusterType();
String initScript = cliConfig.getInitScript();
String cleanupScript = cliConfig.getCleanupScript();
@@ -60,14 +51,20 @@ public class CoreAccumuloCliDriver extends CliAdapter {
try {
qt = new AccumuloQTestUtil(cliConfig.getResultsDir(), cliConfig.getLogDir(), miniMR,
setup, initScript, cleanupScript);
+
+ // do a one time initialization
+ qt.cleanUp();
+ qt.createSources();
} catch (Exception e) {
throw new RuntimeException("Unexpected exception in setUp",e);
}
}
-
+
@Override
- @After
- public void tearDown() {
+ @AfterClass
+ public void shutdown() throws Exception {
+ setup.tearDown();
+
try {
qt.shutdown();
}
@@ -75,6 +72,15 @@ public class CoreAccumuloCliDriver extends CliAdapter {
throw new RuntimeException("Unexpected exception in tearDown",e);
}
}
+ @Override
+ @Before
+ public void setUp() {
+ }
+
+ @Override
+ @After
+ public void tearDown() {
+ }
@Override
public void runTest(String tname, String fname, String fpath) throws Exception {
@@ -89,7 +95,7 @@ public class CoreAccumuloCliDriver extends CliAdapter {
return;
}
- qt.cliInit(new File(fpath));
+ qt.cliInit(new File(fpath), false);
qt.clearTestSideEffects();
int ecode = qt.executeClient(fname);
if (ecode != 0) {
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
index 1ec1e5e..99748bd 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
@@ -19,11 +19,27 @@ package org.apache.hadoop.hive.cli.control;
import static org.junit.Assert.fail;
-import com.google.common.base.Strings;
+import java.io.File;
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.stream.Stream;
+
+import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConfUtil;
import org.apache.hadoop.hive.ql.QTestProcessExecResult;
+import org.apache.hadoop.hive.ql.dataset.Dataset;
+import org.apache.hadoop.hive.ql.dataset.DatasetCollection;
+import org.apache.hadoop.hive.ql.dataset.DatasetParser;
import org.apache.hadoop.hive.ql.hooks.PreExecutePrinter;
import org.apache.hive.beeline.ConvertedOutputFile.Converter;
import org.apache.hive.beeline.QFile;
@@ -34,10 +50,8 @@ import org.apache.hive.jdbc.miniHS2.MiniHS2;
import org.junit.AfterClass;
import org.junit.BeforeClass;
-import java.io.File;
-import java.io.IOException;
-import java.sql.SQLException;
-import java.util.HashMap;
+import com.google.common.base.Strings;
+import com.google.common.collect.ObjectArrays;
public class CoreBeeLineDriver extends CliAdapter {
private final File hiveRootDirectory = new File(AbstractCliConfig.HIVE_ROOT);
@@ -48,11 +62,13 @@ public class CoreBeeLineDriver extends CliAdapter {
private final File cleanupScript;
private final File testDataDirectory;
private final File testScriptDirectory;
+ private final File datasetDirectory;
private boolean overwrite = false;
private boolean useSharedDatabase = false;
private MiniHS2 miniHS2;
private QFileClientBuilder clientBuilder;
private QFileBuilder fileBuilder;
+ private final Map<String, Set<String>> datasets = new HashMap<String, Set<String>>();
public CoreBeeLineDriver(AbstractCliConfig testCliConfig) {
super(testCliConfig);
@@ -71,6 +87,7 @@ public class CoreBeeLineDriver extends CliAdapter {
testDataDirectory = new File(testDataDirectoryName);
}
testScriptDirectory = new File(hiveRootDirectory, "data" + File.separator + "scripts");
+ datasetDirectory = new File(testDataDirectory, "datasets");
String initScriptFileName = System.getProperty("test.init.script");
if (initScriptFileName != null) {
initScript = new File(testScriptDirectory, initScriptFileName);
@@ -156,6 +173,23 @@ public class CoreBeeLineDriver extends CliAdapter {
+ "\nCheck the following logs for details:\n - " + beeLineOutput + "\n - " + log, e);
}
}
+
+ protected void runInfraScript(String[] commands, File beeLineOutput, File log)
+ throws IOException, SQLException {
+ try (QFileBeeLineClient beeLineClient = clientBuilder.getClient(beeLineOutput)) {
+ String[] preCommands =
+ new String[] { "set hive.exec.pre.hooks=" + PreExecutePrinter.class.getName() + ";",
+ "set test.data.dir=" + testDataDirectory + ";",
+ "set test.script.dir=" + testScriptDirectory + ";" };
+
+ String[] allCommands =
+ Stream.concat(Arrays.stream(preCommands), Arrays.stream(commands)).toArray(String[]::new);
+ beeLineClient.execute(allCommands, log, Converter.NONE);
+ } catch (Exception e) {
+ throw new SQLException("Error running infra commands, "
+ + "\nCheck the following logs for details:\n - " + beeLineOutput + "\n - " + log, e);
+ }
+ }
@Override
@AfterClass
@@ -167,12 +201,12 @@ public class CoreBeeLineDriver extends CliAdapter {
}
}
- public void runTest(QFile qFile) throws Exception {
+ private void runTest(QFile qFile, List<Callable<Void>> preCommands) throws Exception {
try (QFileBeeLineClient beeLineClient = clientBuilder.getClient(qFile.getLogFile())) {
long startTime = System.currentTimeMillis();
System.err.println(">>> STARTED " + qFile.getName());
- beeLineClient.execute(qFile);
+ beeLineClient.execute(qFile, preCommands);
long queryEndTime = System.currentTimeMillis();
System.err.println(">>> EXECUTED " + qFile.getName() + ": " + (queryEndTime - startTime)
@@ -207,6 +241,11 @@ public class CoreBeeLineDriver extends CliAdapter {
throw new Exception("Exception running or analyzing the results of the query file: " + qFile
+ "\n" + qFile.getDebugHint(), e);
}
+
+ }
+
+ public void runTest(QFile qFile) throws Exception {
+ runTest(qFile, null);
}
@Override
@@ -220,6 +259,58 @@ public class CoreBeeLineDriver extends CliAdapter {
@Override
public void runTest(String name, String name2, String absolutePath) throws Exception {
QFile qFile = fileBuilder.getQFile(name);
- runTest(qFile);
+ List<Callable<Void>> commands = initDataSetForTest(qFile);
+ runTest(qFile, commands);
+ }
+
+ private List<Callable<Void>> initDataSetForTest(QFile qFile) throws Exception {
+ DatasetParser parser = new DatasetParser();
+ parser.parse(qFile.getInputFile());
+
+ List<Callable<Void>> commands = new ArrayList<>();
+
+ DatasetCollection datasets = parser.getDatasets();
+ for (String table : datasets.getTables()) {
+ Callable<Void> command = initDataset(table, qFile);
+ if (command != null) {
+ commands.add(command);
+ }
+ }
+
+ return commands;
+ }
+
+ protected Callable<Void> initDataset(String table, QFile qFile) throws Exception {
+ if (datasetInitialized(table, qFile)) {
+ return null;
+ }
+
+ Callable<Void> command = new Callable<Void>() {
+ @Override
+ public Void call() throws Exception {
+ File tableFile = new File(new File(datasetDirectory, table), Dataset.INIT_FILE_NAME);
+ List<String> datasetLines = FileUtils.readLines(tableFile);
+ String[] datasetCommands = datasetLines.toArray(new String[datasetLines.size()]);
+
+ runInfraScript(
+ ObjectArrays.concat(String.format("use %s;", qFile.getDatabaseName()), datasetCommands),
+ new File(logDirectory, "dataset.beeline"), new File(logDirectory, "dataset.raw"));
+
+ return null;
+ }
+ };
+
+ datasets.get(qFile.getDatabaseName()).add(table);
+
+ return command;
+ }
+
+ private boolean datasetInitialized(String table, QFile qFile) {
+ if (datasets.get(qFile.getDatabaseName()) == null) {
+ datasets.put(qFile.getDatabaseName(), new HashSet<String>());
+ return false;
+ }
+
+ return datasets.get(qFile.getDatabaseName()).contains(table);
}
}
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
index fbca424..c36d231 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
@@ -133,6 +133,9 @@ public class CoreCompareCliDriver extends CliAdapter{
}
int ecode = 0;
+
+ qt.cliInit(new File(fpath), false);
+
List<String> outputs = new ArrayList<>(versionFiles.size());
for (String versionFile : versionFiles) {
// 1 for "_" after tname; 3 for ".qv" at the end. Version is in between.
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java
index 24bdc58..b40b8d7 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java
@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hive.cli.control;
-import static org.apache.hadoop.hive.cli.control.AbstractCliConfig.HIVE_ROOT;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
http://git-wip-us.apache.org/repos/asf/hive/blob/86b678f5/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
index 1873bfe..07df0c9 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
@@ -68,14 +68,7 @@ public class HBaseQTestUtil extends QTestUtil {
conf.setBoolean("hive.test.init.phase", true);
- // create and load the input data into the hbase table
- runCreateTableCmd(
- "CREATE TABLE " + HBASE_SRC_NAME + "(key INT, value STRING)"
- + " STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'"
- + " WITH SERDEPROPERTIES ('hbase.columns.mapping' = ':key,cf:val')"
- + " TBLPROPERTIES ('hbase.table.name' = '" + HBASE_SRC_NAME + "')"
- );
- runCmd("INSERT OVERWRITE TABLE " + HBASE_SRC_NAME + " SELECT * FROM src");
+ initDataset(HBASE_SRC_NAME);
// create a snapshot
Admin admin = null;
@@ -93,9 +86,6 @@ public class HBaseQTestUtil extends QTestUtil {
public void cleanUp(String tname) throws Exception {
super.cleanUp(tname);
- // drop in case leftover from unsuccessful run
- db.dropTable(Warehouse.DEFAULT_DATABASE_NAME, HBASE_SRC_NAME);
-
Admin admin = null;
try {
admin = conn.getAdmin();