Posted to commits@bigtop.apache.org by ja...@apache.org on 2014/10/23 05:08:05 UTC
[8/8] git commit: BIGTOP-1450. Eliminate broken hive test artifacts in favor of smoke-tests.
BIGTOP-1450. Eliminate broken hive test artifacts in favor of smoke-tests.
Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/e209fdbb
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/e209fdbb
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/e209fdbb
Branch: refs/heads/master
Commit: e209fdbbb2867f7d939a1500fe473b0cae011b58
Parents: 1f209ba
Author: jay@apache.org <ja...@apache.org>
Authored: Wed Oct 22 23:05:00 2014 -0400
Committer: jay@apache.org <jayunit100>
Committed: Wed Oct 22 23:05:54 2014 -0400
----------------------------------------------------------------------
bigtop-tests/test-artifacts/hive/README | 16 -
bigtop-tests/test-artifacts/hive/pom.xml | 32 -
.../hivesmoke/HiveBulkScriptExecutor.groovy | 83 -
.../IntegrationTestHiveSmokeBulk.groovy | 94 -
.../itest/hivesmoke/TestHiveSmokeBulk.groovy | 104 -
.../bigtop/itest/hivesmoke/TestJdbcDriver.java | 159 -
.../hive/src/main/resources/a.txt | 2 -
.../scripts/integration/hbase_joins/in | 96 -
.../scripts/integration/hbase_joins/out | 125 -
.../scripts/integration/hbase_pushdown/in | 67 -
.../scripts/integration/hbase_pushdown/out | 384 ---
.../scripts/integration/hbase_queries/filter | 8 -
.../scripts/integration/hbase_queries/in | 174 --
.../scripts/integration/hbase_queries/out | 701 -----
.../scripts/integration/hbase_stats/filter | 4 -
.../scripts/integration/hbase_stats/in | 35 -
.../scripts/integration/hbase_stats/out | 170 -
.../resources/scripts/ql/auto_join20/filter | 3 -
.../main/resources/scripts/ql/auto_join20/in | 45 -
.../main/resources/scripts/ql/auto_join20/out | 669 ----
.../src/main/resources/scripts/ql/basic/filter | 4 -
.../hive/src/main/resources/scripts/ql/basic/in | 29 -
.../src/main/resources/scripts/ql/basic/out | 19 -
.../scripts/ql/bucketizedhiveinputformat/filter | 5 -
.../scripts/ql/bucketizedhiveinputformat/in | 47 -
.../scripts/ql/bucketizedhiveinputformat/out | 315 --
.../resources/scripts/ql/bucketmapjoin5/filter | 6 -
.../main/resources/scripts/ql/bucketmapjoin5/in | 99 -
.../resources/scripts/ql/bucketmapjoin5/out | 1015 ------
.../scripts/ql/drop_multi_partitions/in | 31 -
.../scripts/ql/drop_multi_partitions/out | 43 -
.../ql/groupby_map_ppr_multi_distinct/filter | 5 -
.../ql/groupby_map_ppr_multi_distinct/in | 34 -
.../ql/groupby_map_ppr_multi_distinct/out | 301 --
.../resources/scripts/ql/index_creation/filter | 8 -
.../main/resources/scripts/ql/index_creation/in | 66 -
.../resources/scripts/ql/index_creation/out | 131 -
.../src/main/resources/scripts/ql/join19/filter | 1 -
.../src/main/resources/scripts/ql/join19/in | 72 -
.../src/main/resources/scripts/ql/join19/out | 332 --
.../resources/scripts/ql/join_filters/filter | 2 -
.../main/resources/scripts/ql/join_filters/in | 169 -
.../main/resources/scripts/ql/join_filters/out | 558 ----
.../resources/scripts/ql/load_dyn_part14/filter | 7 -
.../resources/scripts/ql/load_dyn_part14/in | 49 -
.../resources/scripts/ql/load_dyn_part14/out | 277 --
.../scripts/ql/merge_dynamic_partition/filter | 7 -
.../scripts/ql/merge_dynamic_partition/in | 60 -
.../scripts/ql/merge_dynamic_partition/out | 1852 -----------
.../resources/scripts/ql/multi_insert/filter | 6 -
.../main/resources/scripts/ql/multi_insert/in | 279 --
.../main/resources/scripts/ql/multi_insert/out | 2910 ------------------
.../resources/scripts/ql/rcfile_columnar/filter | 2 -
.../resources/scripts/ql/rcfile_columnar/in | 29 -
.../resources/scripts/ql/rcfile_columnar/out | 29 -
.../src/main/resources/scripts/ql/union3/filter | 4 -
.../src/main/resources/scripts/ql/union3/in | 56 -
.../src/main/resources/scripts/ql/union3/out | 327 --
.../main/resources/scripts/ql/uniquejoin/filter | 4 -
.../src/main/resources/scripts/ql/uniquejoin/in | 39 -
.../main/resources/scripts/ql/uniquejoin/out | 83 -
.../hive/src/main/resources/seed.hql | 49 -
.../src/main/resources/seed_data_files/T1.txt | 6 -
.../src/main/resources/seed_data_files/T2.txt | 6 -
.../src/main/resources/seed_data_files/T3.txt | 4 -
.../seed_data_files/apache.access.2.log | 1 -
.../resources/seed_data_files/apache.access.log | 1 -
.../main/resources/seed_data_files/complex.seq | Bin 1606 -> 0 bytes
.../resources/seed_data_files/covar_tab.txt | 6 -
.../seed_data_files/create_nested_type.txt | 4 -
.../resources/seed_data_files/datatypes.txt | 3 -
.../main/resources/seed_data_files/docurl.txt | 8 -
.../main/resources/seed_data_files/empty1.txt | 0
.../main/resources/seed_data_files/empty2.txt | 0
.../resources/seed_data_files/hive_626_bar.txt | 1 -
.../seed_data_files/hive_626_count.txt | 1 -
.../resources/seed_data_files/hive_626_foo.txt | 1 -
.../src/main/resources/seed_data_files/in1.txt | 3 -
.../src/main/resources/seed_data_files/in2.txt | 3 -
.../src/main/resources/seed_data_files/in3.txt | 4 -
.../src/main/resources/seed_data_files/in4.txt | 7 -
.../src/main/resources/seed_data_files/in5.txt | 19 -
.../src/main/resources/seed_data_files/in6.txt | 19 -
.../src/main/resources/seed_data_files/json.txt | 1 -
.../src/main/resources/seed_data_files/kv1.seq | Bin 10508 -> 0 bytes
.../seed_data_files/kv1.string-sorted.txt | 500 ---
.../src/main/resources/seed_data_files/kv1.txt | 500 ---
.../seed_data_files/kv1.val.sorted.txt | 500 ---
.../resources/seed_data_files/kv1_broken.seq | Bin 216 -> 0 bytes
.../main/resources/seed_data_files/kv1_cb.txt | 500 ---
.../main/resources/seed_data_files/kv1_cc.txt | 500 ---
.../seed_data_files/kv1kv2.cogroup.txt | 1000 ------
.../src/main/resources/seed_data_files/kv2.txt | 500 ---
.../src/main/resources/seed_data_files/kv3.txt | 25 -
.../src/main/resources/seed_data_files/kv4.txt | 1 -
.../src/main/resources/seed_data_files/kv5.txt | 24 -
.../src/main/resources/seed_data_files/kv6.txt | 100 -
.../resources/seed_data_files/lt100.sorted.txt | 84 -
.../main/resources/seed_data_files/lt100.txt | 84 -
.../resources/seed_data_files/lt100.txt.deflate | 3 -
.../src/main/resources/seed_data_files/null.txt | 10 -
.../main/resources/seed_data_files/nullfile.txt | 0
.../sample-queryplan-in-history.txt | 1 -
.../seed_data_files/sample-queryplan.txt | 1 -
.../seed_data_files/smb_bucket_input.rc | Bin 253 -> 0 bytes
.../seed_data_files/smb_bucket_input.txt | 7 -
.../resources/seed_data_files/smbbucket_1.rc | Bin 208 -> 0 bytes
.../resources/seed_data_files/smbbucket_1.txt | 5 -
.../resources/seed_data_files/smbbucket_2.rc | Bin 206 -> 0 bytes
.../resources/seed_data_files/smbbucket_2.txt | 4 -
.../resources/seed_data_files/smbbucket_3.rc | Bin 222 -> 0 bytes
.../resources/seed_data_files/smbbucket_3.txt | 6 -
.../main/resources/seed_data_files/source.txt | 4 -
.../resources/seed_data_files/srcbucket0.txt | 493 ---
.../resources/seed_data_files/srcbucket1.txt | 507 ---
.../resources/seed_data_files/srcbucket20.txt | 118 -
.../resources/seed_data_files/srcbucket21.txt | 120 -
.../resources/seed_data_files/srcbucket22.txt | 124 -
.../resources/seed_data_files/srcbucket23.txt | 138 -
.../main/resources/seed_data_files/string.txt | Bin 93 -> 0 bytes
.../main/resources/seed_data_files/symlink1.txt | 2 -
.../main/resources/seed_data_files/symlink2.txt | 1 -
.../src/main/resources/seed_data_files/test.dat | 6 -
.../main/resources/seed_data_files/text-en.txt | 95 -
.../resources/seed_data_files/union_input.txt | 8 -
.../hive/src/main/resources/test.hql | 28 -
bigtop-tests/test-execution/smokes/hive/pom.xml | 147 -
127 files changed, 18576 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/bigtop/blob/e209fdbb/bigtop-tests/test-artifacts/hive/README
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/README b/bigtop-tests/test-artifacts/hive/README
deleted file mode 100644
index 16d59cb..0000000
--- a/bigtop-tests/test-artifacts/hive/README
+++ /dev/null
@@ -1,16 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-This is a project to develop and build Hive smoke and system tests.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/e209fdbb/bigtop-tests/test-artifacts/hive/pom.xml
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/pom.xml b/bigtop-tests/test-artifacts/hive/pom.xml
deleted file mode 100644
index 633605d..0000000
--- a/bigtop-tests/test-artifacts/hive/pom.xml
+++ /dev/null
@@ -1,32 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>org.apache.bigtop.itest</groupId>
- <artifactId>bigtop-smokes</artifactId>
- <version>0.9.0-SNAPSHOT</version>
- <relativePath>../pom.xml</relativePath>
- </parent>
-
- <groupId>org.apache.bigtop.itest</groupId>
- <artifactId>hive-smoke</artifactId>
- <version>0.9.0-SNAPSHOT</version>
- <name>hivesmoke</name>
-</project>
http://git-wip-us.apache.org/repos/asf/bigtop/blob/e209fdbb/bigtop-tests/test-artifacts/hive/src/main/groovy/org/apache/bigtop/itest/hivesmoke/HiveBulkScriptExecutor.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/groovy/org/apache/bigtop/itest/hivesmoke/HiveBulkScriptExecutor.groovy b/bigtop-tests/test-artifacts/hive/src/main/groovy/org/apache/bigtop/itest/hivesmoke/HiveBulkScriptExecutor.groovy
deleted file mode 100644
index a25214a..0000000
--- a/bigtop-tests/test-artifacts/hive/src/main/groovy/org/apache/bigtop/itest/hivesmoke/HiveBulkScriptExecutor.groovy
+++ /dev/null
@@ -1,83 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.itest.hivesmoke
-
-import org.apache.commons.logging.LogFactory
-import org.apache.commons.logging.Log
-import org.apache.bigtop.itest.JarContent
-import org.apache.bigtop.itest.shell.Shell
-import static junit.framework.Assert.assertEquals
-
-public class HiveBulkScriptExecutor {
- static private Log LOG = LogFactory.getLog(Object.class);
-
- static Shell sh = new Shell("/bin/bash -s");
-
- private File scripts;
- private String location;
-
- public static final String RESOURCES ="bigtop-tests/test-artifacts/hive/src/main/resources/"
-
- public HiveBulkScriptExecutor(String l) {
- location = l;
- scripts = new File(location);
-
- if (!scripts.exists()) {
- try{
- JarContent.unpackJarContainer(HiveBulkScriptExecutor.class, '.' , null);
- }
- //BIGTOP-1222 : Support script execution.
- catch(Throwable t){
- LOG.info("Didnt find jar resource, copying resources locally...");
- def resources = System.getenv("BIGTOP_HOME")+"/"+RESOURCES ;
- sh.exec("cp -r ${resources}/* .");
- sh.exec("ls ${l}");
-
- }
- }
- }
-
- public List<String> getScripts() {
- List<String> res = [];
-
- try {
- scripts.eachDir { res.add(it.name); }
- } catch (Throwable ex) {}
- return res;
- }
-
- public void runScript(String test, String extraArgs) {
- String l = "${location}/${test}";
- String test_command="""diff -u <(\$F < ${l}/actual) <(\$F < ${l}/out)""" ;
- sh.exec("""
- F=cat
- if [ -f ${l}/filter ]; then
- chmod 777 ${l}/filter
- F=${l}/filter
- fi
- hive ${extraArgs} -v -f ${l}/in > ${l}/actual && ${test_command}"""
- ) ;
-
- assertEquals("Got unexpected output from test script ${test}",
- 0, sh.ret);
- }
-
- public void runScript(String test) {
- runScript(test, "");
- }
-}
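
The core technique in the executor above is golden-file comparison: each test directory carries an "in" HiveQL script, an expected "out", and an optional executable "filter" that normalizes nondeterministic output before diffing. A minimal standalone sketch of that loop in Groovy (hypothetical test directory; assumes hive and bash on the PATH and an executable filter, mirroring runScript() above):

    // Golden-file comparison, as in runScript() above.
    def dir = new File("scripts/ql/basic")                 // holds in, out, optional filter
    def filter = new File(dir, "filter")
    def normalize = filter.exists() ? filter.path : "cat"  // identity filter by default
    def cmd = """
      hive -v -f ${dir}/in > ${dir}/actual &&
      diff -u <(${normalize} < ${dir}/actual) <(${normalize} < ${dir}/out)
    """
    def proc = ["bash", "-c", cmd].execute()
    proc.waitFor()
    assert proc.exitValue() == 0 : "unexpected output for ${dir.name}"

A nonzero diff exit status fails the assertion, so any un-normalized drift in Hive's output surfaces as a test failure.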
http://git-wip-us.apache.org/repos/asf/bigtop/blob/e209fdbb/bigtop-tests/test-artifacts/hive/src/main/groovy/org/apache/bigtop/itest/hivesmoke/IntegrationTestHiveSmokeBulk.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/groovy/org/apache/bigtop/itest/hivesmoke/IntegrationTestHiveSmokeBulk.groovy b/bigtop-tests/test-artifacts/hive/src/main/groovy/org/apache/bigtop/itest/hivesmoke/IntegrationTestHiveSmokeBulk.groovy
deleted file mode 100644
index fd908b1..0000000
--- a/bigtop-tests/test-artifacts/hive/src/main/groovy/org/apache/bigtop/itest/hivesmoke/IntegrationTestHiveSmokeBulk.groovy
+++ /dev/null
@@ -1,94 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.itest.hivesmoke
-
-import org.junit.Test
-import org.junit.AfterClass
-import org.junit.BeforeClass
-import org.apache.bigtop.itest.shell.Shell
-import static junit.framework.Assert.assertEquals
-import org.apache.bigtop.itest.junit.OrderedParameterized
-import org.junit.runner.RunWith
-import org.junit.runners.Parameterized.Parameters
-
-@RunWith(OrderedParameterized.class)
-public class IntegrationTestHiveSmokeBulk {
- private static String test_include =
- System.getProperty("org.apache.bigtop.itest.hivesmoke.IntegrationTestHiveSmokeBulk.test_include");
- private static String test_exclude =
- System.getProperty("org.apache.bigtop.itest.hivesmoke.IntegrationTestHiveSmokeBulk.test_exclude");
- private static String extra_jars =
- System.getProperty("org.apache.bigtop.itest.hivesmoke.IntegrationTestHiveSmokeBulk.extra_jars","");
-
- static Shell sh = new Shell("/bin/bash -s");
- static HiveBulkScriptExecutor scripts = new HiveBulkScriptExecutor("scripts/integration");
-
- private String test;
-
- public IntegrationTestHiveSmokeBulk(String t) {
- test = t;
- }
-
- @BeforeClass
- public static void setUp() {
- def hbase_script = "";
- def hive_script = "";
- Shell shHbase = new Shell("hbase shell");
-
- sh.exec("hive -f ./seed.hql");
- assertEquals("Can not initialize seed databases",
- 0, sh.ret);
-
- ['PARTITION_STAT_TBL', 'countries', 'hbase_pushdown', 'stats_src', 'hbase_part',
- 'hbase_table_0', 'hbase_table_3', 'hbase_table_4', 'hbase_table_6', 'hbase_table_7',
- 'hbase_table_8', 'states', 'users', 'empty_hbase_table'].each {
- hbase_script <<= "disable '${it}'\ndrop '${it}'\n";
- hive_script <<= "drop table ${it};\n";
- }
- shHbase.exec("${hbase_script}\nquit\n\n");
- sh.exec("hive << __EOT__\n${hive_script}__EOT__");
- }
-
- @AfterClass
- public static void tearDown() {
- sh.exec("hadoop fs -rmr -skipTrash /user/hive/warehouse",
- "hadoop fs -rmr -skipTrash /tmp/count");
- }
-
- @Parameters
- public static Map<String, Object[]> readTestCases() {
- List<String> tests;
- if (test_include != null) {
- tests = scripts.getScripts().intersect(Arrays.asList(test_include.split(",")));
- } else if (test_exclude != null) {
- tests = scripts.getScripts() - Arrays.asList(test_exclude.split(","));
- } else {
- tests = scripts.getScripts();
- }
- Map res = [:];
- tests.each {
- res[it] = ([it] as String[]);
- };
- return res;
- }
-
- @Test
- public void testHiveBulk() {
- scripts.runScript(test, "${(extra_jars == '') ? '' : '--auxpath '}$extra_jars");
- }
-}
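
Test selection in this runner (and in TestHiveSmokeBulk below) is driven by two system properties, test_include and test_exclude, fed into OrderedParameterized. A self-contained sketch of that selection logic (selectTests is a hypothetical name standing in for the @Parameters body; the available list stands in for getScripts()):

    // Include wins over exclude; with neither set, every discovered script runs.
    List<String> selectTests(List<String> available, String include, String exclude) {
        if (include != null)
            return available.intersect(include.split(",") as List)
        if (exclude != null)
            return available - (exclude.split(",") as List)
        return available
    }

    assert selectTests(["hbase_joins", "hbase_queries", "hbase_stats"],
                       "hbase_joins,hbase_stats", null) == ["hbase_joins", "hbase_stats"]
    assert selectTests(["hbase_joins", "hbase_queries", "hbase_stats"],
                       null, "hbase_queries") == ["hbase_joins", "hbase_stats"]

On the command line this corresponds to passing, for example, -Dorg.apache.bigtop.itest.hivesmoke.IntegrationTestHiveSmokeBulk.test_include=hbase_joins,hbase_stats to the test JVM.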
http://git-wip-us.apache.org/repos/asf/bigtop/blob/e209fdbb/bigtop-tests/test-artifacts/hive/src/main/groovy/org/apache/bigtop/itest/hivesmoke/TestHiveSmokeBulk.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/groovy/org/apache/bigtop/itest/hivesmoke/TestHiveSmokeBulk.groovy b/bigtop-tests/test-artifacts/hive/src/main/groovy/org/apache/bigtop/itest/hivesmoke/TestHiveSmokeBulk.groovy
deleted file mode 100644
index f8dc7e6..0000000
--- a/bigtop-tests/test-artifacts/hive/src/main/groovy/org/apache/bigtop/itest/hivesmoke/TestHiveSmokeBulk.groovy
+++ /dev/null
@@ -1,104 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.bigtop.itest.hivesmoke
-
-import org.junit.Test
-import org.junit.AfterClass
-import org.junit.Before
-import org.junit.BeforeClass
-import org.apache.bigtop.itest.shell.Shell
-import org.apache.bigtop.itest.junit.OrderedParameterized
-import org.junit.runner.RunWith
-import org.junit.runners.Parameterized.Parameters
-import org.apache.commons.logging.LogFactory
-import org.apache.commons.logging.Log
-import static org.junit.Assert.assertTrue
-
-@RunWith(OrderedParameterized.class)
-public class TestHiveSmokeBulk {
-
- static private Log LOG = LogFactory.getLog(Object.class);
-
- private static String test_include =
- System.getProperty("org.apache.bigtop.itest.hivesmoke.TestHiveSmokeBulk.test_include");
- private static String test_exclude =
- System.getProperty("org.apache.bigtop.itest.hivesmoke.TestHiveSmokeBulk.test_exclude");
- static Shell sh = new Shell("/bin/bash -s");
- static HiveBulkScriptExecutor scripts = new HiveBulkScriptExecutor("scripts/ql");
-
- private String test;
-
- public TestHiveSmokeBulk(String t) {
- test = t;
- }
-
- @Before
- public void cleanUp() {
- def hive_script = "";
- Shell shHive = new Shell("hive");
- ["analyze_srcpart","authorization_part","columntable",
- "dest1","hbase_part","hbase_pushdown","merge_dynamic_part",
- "mp","myinput1","nzhang_part14","src_multi1","src_multi2",
- "srcbucket_mapjoin","srcpart_merge_dp","stats_src","t1",
- "triples","text_kv1","union_out", "T1", "T2", "T3", "smb_input1",
- "smb_input2", "srcbucket_mapjoin_part", "bucketmapjoin_hash_result_1",
- "bucketmapjoin_hash_result_2", "bucketmapjoin_tmp_result",
- "srcbucket_mapjoin_part_2"].each {
- hive_script <<= "drop table ${it};\n";
- }
- shHive.exec("${hive_script} quit; \n");
- }
-
- @BeforeClass
- public static void setUp() {
- sh.exec("hive -f ./seed.hql");
- LOG.info(sh.getOut())
- LOG.info(sh.getErr())
- assertTrue("FAILED.. "+sh.getOut()+" "+sh.getErr(),sh.getRet()==0);
- }
-
- @AfterClass
- public static void tearDown() {
- sh.exec("hadoop fs -rmr -skipTrash /user/hive/warehouse",
- "hadoop fs -rmr -skipTrash /tmp/count");
- }
-
- @Parameters
- public static Map<String, Object[]> readTestCases() {
- LOG.info("hive includes = " + test_include);
- List<String> tests;
- if (test_include != null) {
- tests = scripts.getScripts().intersect(Arrays.asList(test_include.split(",")));
- } else if (test_exclude != null) {
- tests = scripts.getScripts() - Arrays.asList(test_exclude.split(","));
- } else {
- tests = scripts.getScripts();
- }
- LOG.info("HIVE TESTS = " + tests);
- Map res = [:];
- tests.each {
- res[it] = ([it] as String[]);
- };
- return res;
- }
-
- @Test
- public void testHiveBulk() {
- scripts.runScript(test);
- }
-}
http://git-wip-us.apache.org/repos/asf/bigtop/blob/e209fdbb/bigtop-tests/test-artifacts/hive/src/main/groovy/org/apache/bigtop/itest/hivesmoke/TestJdbcDriver.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/groovy/org/apache/bigtop/itest/hivesmoke/TestJdbcDriver.java b/bigtop-tests/test-artifacts/hive/src/main/groovy/org/apache/bigtop/itest/hivesmoke/TestJdbcDriver.java
deleted file mode 100644
index 5a71a14..0000000
--- a/bigtop-tests/test-artifacts/hive/src/main/groovy/org/apache/bigtop/itest/hivesmoke/TestJdbcDriver.java
+++ /dev/null
@@ -1,159 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.bigtop.itest.hivesmoke;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Date;
-
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
-
-import org.apache.bigtop.itest.Contract;
-import org.apache.bigtop.itest.ParameterSetter;
-import org.apache.bigtop.itest.Property;
-import org.apache.bigtop.itest.shell.Shell;
-
-@Contract(
- properties = {
- @Property(name="hiveserver.startup.wait", type=Property.Type.INT, longValue=3000, intValue=3000, defaultValue="3000")
- },
- env = {})
-public class TestJdbcDriver {
-
- public static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
- public static String hiveserver_url = "jdbc:hive://localhost:10000/default";
- public static Shell sh = new Shell("/bin/bash -s");
- public static String testDir = "/tmp/hive-jdbc." + (new Date().getTime());
- public static String hiveserver_pid;
- public static int hiveserver_startup_wait;
- private Connection con;
-
- @BeforeClass
- public static void setUp() throws ClassNotFoundException, InterruptedException, NoSuchFieldException, IllegalAccessException {
- ParameterSetter.setProperties(TestJdbcDriver.class, new String[] {"hiveserver_startup_wait"});
- System.out.println("hiveserver_startup_wait: " + hiveserver_startup_wait);
- Class.forName(driverName);
- sh.exec("hadoop fs -mkdir " + testDir);
- assertTrue("Could not create test directory", sh.getRet() == 0);
- sh.exec("hadoop fs -copyFromLocal a.txt " + testDir + "/a.txt");
- assertTrue("Could not copy local file to test directory", sh.getRet() == 0);
- // start hiveserver in background and remember the pid
- sh.exec("(HIVE_PORT=10000 hive --service hiveserver > /dev/null 2>&1 & echo $! ) 2> /dev/null");
- hiveserver_pid = sh.getOut().get(0);
- Thread.sleep(hiveserver_startup_wait); // allow time for hiveserver to be up
- }
-
- @Before
- public void getConnection() throws SQLException {
- //System.out.println(hiveserver_url);
- con = DriverManager.getConnection(hiveserver_url, "", "");
- //System.out.println("JDBC connection is " +
- // (con == null ? "not instantiated." : "instantiated."));
- }
-
- @After
- public void closeConnection() throws SQLException {
- if (con != null)
- con.close();
- }
-
- @AfterClass
- public static void tearDown() {
- sh.exec("hadoop fs -rmr -skipTrash " + testDir);
- sh.exec("kill -9 " + hiveserver_pid);
- }
-
- @Test(timeout=120000L)
- public void testCreate() throws SQLException {
- Statement stmt = con.createStatement();
- String tableName = "hive_jdbc_driver_test";
- stmt.executeQuery("drop table if exists " + tableName);
- ResultSet res = stmt.executeQuery("create table " + tableName +
- " (key int, value string)");
- // show tables
- String sql = "show tables";
- //System.out.println("executing: " + sql);
- res = stmt.executeQuery(sql);
- boolean tableCreated = false;
- while (res.next()) {
- String tab_name = res.getString(1);
- //System.out.println(tab_name);
- if (tab_name.equals(tableName))
- tableCreated = true;
- }
- assertTrue("table " + tableName + " does not appear to be created",
- tableCreated);
- // describe table
- sql = "describe " + tableName;
- //System.out.println("executing: " + sql);
- res = stmt.executeQuery(sql);
- List<String> colNames = new ArrayList<String>();
- List<String> dataTypes = new ArrayList<String>();
- while (res.next()) {
- String col_name = res.getString(1);
- String data_type = res.getString(2);
- colNames.add(col_name);
- dataTypes.add(data_type);
- //System.out.println(col_name + "\t" + data_type);
- }
- assertEquals("table should have two columns", 2, colNames.size());
- assertEquals("key", colNames.get(0));
- assertEquals("value", colNames.get(1));
- assertEquals("int", dataTypes.get(0));
- assertEquals("string", dataTypes.get(1));
-
- // load data into table
- String filepath = testDir + "/a.txt"; // this is an hdfs filepath
- sql = "load data inpath '" + filepath + "' into table " + tableName;
- //System.out.println("executing: " + sql);
- res = stmt.executeQuery(sql);
-
- // select
- sql = "select * from " + tableName;
- //System.out.println("executing: " + sql);
- res = stmt.executeQuery(sql);
- List<Integer> keys = new ArrayList<Integer>();
- List<String> values = new ArrayList<String>();
- while (res.next()) {
- int key = res.getInt(1);
- String value = res.getString(2);
- keys.add(new Integer(key));
- values.add(value);
- //System.out.println("" + key + "\t" + value);
- }
- assertEquals("table should have two rows", 2, keys.size());
- assertEquals(new Integer(1), keys.get(0));
- assertEquals(new Integer(2), keys.get(1));
- assertEquals("foo", values.get(0));
- assertEquals("bar", values.get(1));
- }
-
-}
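
The deleted JDBC test targets the original HiveServer (the pre-HiveServer2 Thrift service) through the old driver class and jdbc:hive:// URL, both obsolete in later Hive releases. A trimmed Groovy sketch of the same flow, assuming that driver is on the classpath and a server is already listening on port 10000:

    import java.sql.DriverManager

    // Old HiveServer1 driver and URL, exactly as the test above used them.
    Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver")
    def con = DriverManager.getConnection("jdbc:hive://localhost:10000/default", "", "")
    def stmt = con.createStatement()
    try {
        stmt.executeQuery("drop table if exists hive_jdbc_driver_test")
        stmt.executeQuery("create table hive_jdbc_driver_test (key int, value string)")
        def res = stmt.executeQuery("show tables")
        def found = false
        while (res.next()) {
            if (res.getString(1) == "hive_jdbc_driver_test") found = true
        }
        assert found : "table was not created"
    } finally {
        con.close()
    }

As in the original test, executeQuery() is used even for DDL statements, matching how the HiveServer1 driver was exercised above.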
http://git-wip-us.apache.org/repos/asf/bigtop/blob/e209fdbb/bigtop-tests/test-artifacts/hive/src/main/resources/a.txt
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/a.txt b/bigtop-tests/test-artifacts/hive/src/main/resources/a.txt
deleted file mode 100644
index b930c1b..0000000
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/a.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-1foo
-2bar
http://git-wip-us.apache.org/repos/asf/bigtop/blob/e209fdbb/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_joins/in
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_joins/in b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_joins/in
deleted file mode 100644
index f19e369..0000000
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_joins/in
+++ /dev/null
@@ -1,96 +0,0 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
-DROP TABLE users;
-DROP TABLE states;
-DROP TABLE countries;
-DROP TABLE users_level;
-
--- From HIVE-1257
-
-CREATE TABLE users(key string, state string, country string, country_id int)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES (
-"hbase.columns.mapping" = "info:state,info:country,info:country_id"
-);
-
-CREATE TABLE states(key string, name string)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES (
-"hbase.columns.mapping" = "state:name"
-);
-
-CREATE TABLE countries(key string, name string, country string, country_id int)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES (
-"hbase.columns.mapping" = "info:name,info:country,info:country_id"
-);
-
-INSERT OVERWRITE TABLE users SELECT 'user1', 'IA', 'USA', 0
-FROM src WHERE key=100;
-
-INSERT OVERWRITE TABLE states SELECT 'IA', 'Iowa'
-FROM src WHERE key=100;
-
-INSERT OVERWRITE TABLE countries SELECT 'USA', 'United States', 'USA', 1
-FROM src WHERE key=100;
-
-set hive.input.format = org.apache.hadoop.hive.ql.io.HiveInputFormat;
-
-SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c
-ON (u.country = c.key);
-
-SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c
-ON (u.country = c.country);
-
-SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c
-ON (u.country_id = c.country_id);
-
-SELECT u.key, u.state, s.name FROM users u JOIN states s
-ON (u.state = s.key);
-
-set hive.input.format = org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
-
-SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c
-ON (u.country = c.key);
-
-SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c
-ON (u.country = c.country);
-
-SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c
-ON (u.country_id = c.country_id);
-
-SELECT u.key, u.state, s.name FROM users u JOIN states s
-ON (u.state = s.key);
-
-DROP TABLE users;
-DROP TABLE states;
-DROP TABLE countries;
-
-CREATE TABLE users(key int, userid int, username string, created int)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,f:userid,f:nickname,f:created");
-
-CREATE TABLE users_level(key int, userid int, level int)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,f:userid,f:level");
-
--- HIVE-1903: the problem fixed here showed up even without any data,
--- so no need to load any to test it
-SELECT year(from_unixtime(users.created)) AS year, level, count(users.userid) AS num
- FROM users JOIN users_level ON (users.userid = users_level.userid)
- GROUP BY year(from_unixtime(users.created)), level;
-
-DROP TABLE users;
-DROP TABLE users_level;
http://git-wip-us.apache.org/repos/asf/bigtop/blob/e209fdbb/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_joins/out
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_joins/out b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_joins/out
deleted file mode 100644
index 4ffbf2b..0000000
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_joins/out
+++ /dev/null
@@ -1,125 +0,0 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
-DROP TABLE users
-
-DROP TABLE states
-
-DROP TABLE countries
-
-DROP TABLE users_level
-
-
--- From HIVE-1257
-
-CREATE TABLE users(key string, state string, country string, country_id int)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES (
-"hbase.columns.mapping" = "info:state,info:country,info:country_id"
-)
-
-
-CREATE TABLE states(key string, name string)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES (
-"hbase.columns.mapping" = "state:name"
-)
-
-
-CREATE TABLE countries(key string, name string, country string, country_id int)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES (
-"hbase.columns.mapping" = "info:name,info:country,info:country_id"
-)
-
-
-INSERT OVERWRITE TABLE users SELECT 'user1', 'IA', 'USA', 0
-FROM src WHERE key=100
-
-
-INSERT OVERWRITE TABLE states SELECT 'IA', 'Iowa'
-FROM src WHERE key=100
-
-
-INSERT OVERWRITE TABLE countries SELECT 'USA', 'United States', 'USA', 1
-FROM src WHERE key=100
-set hive.input.format = org.apache.hadoop.hive.ql.io.HiveInputFormat
-
-
-SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c
-ON (u.country = c.key)
-user1 USA United States USA
-
-
-SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c
-ON (u.country = c.country)
-user1 USA United States USA
-
-
-SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c
-ON (u.country_id = c.country_id)
-
-
-SELECT u.key, u.state, s.name FROM users u JOIN states s
-ON (u.state = s.key)
-user1 IA Iowa
-set hive.input.format = org.apache.hadoop.hive.ql.io.CombineHiveInputFormat
-
-
-SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c
-ON (u.country = c.key)
-user1 USA United States USA
-
-
-SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c
-ON (u.country = c.country)
-user1 USA United States USA
-
-
-SELECT u.key, u.country, c.name, c.key FROM users u JOIN countries c
-ON (u.country_id = c.country_id)
-
-
-SELECT u.key, u.state, s.name FROM users u JOIN states s
-ON (u.state = s.key)
-user1 IA Iowa
-
-
-DROP TABLE users
-
-DROP TABLE states
-
-DROP TABLE countries
-
-
-CREATE TABLE users(key int, userid int, username string, created int)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,f:userid,f:nickname,f:created")
-
-
-CREATE TABLE users_level(key int, userid int, level int)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,f:userid,f:level")
-
-
--- HIVE-1903: the problem fixed here showed up even without any data,
--- so no need to load any to test it
-SELECT year(from_unixtime(users.created)) AS year, level, count(users.userid) AS num
- FROM users JOIN users_level ON (users.userid = users_level.userid)
- GROUP BY year(from_unixtime(users.created)), level
-
-
-DROP TABLE users
-
-DROP TABLE users_level
http://git-wip-us.apache.org/repos/asf/bigtop/blob/e209fdbb/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_pushdown/in
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_pushdown/in b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_pushdown/in
deleted file mode 100644
index e1ffa50..0000000
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_pushdown/in
+++ /dev/null
@@ -1,67 +0,0 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
-CREATE TABLE hbase_pushdown(key int, value string)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string");
-
-INSERT OVERWRITE TABLE hbase_pushdown
-SELECT *
-FROM src;
-
--- with full pushdown
-explain select * from hbase_pushdown where key=90;
-
-select * from hbase_pushdown where key=90;
-
--- with partial pushdown
-
-explain select * from hbase_pushdown where key=90 and value like '%90%';
-
-select * from hbase_pushdown where key=90 and value like '%90%';
-
--- with two residuals
-
-explain select * from hbase_pushdown
-where key=90 and value like '%90%' and key=cast(value as int);
-
--- with contradictory pushdowns
-
-explain select * from hbase_pushdown
-where key=80 and key=90 and value like '%90%';
-
-select * from hbase_pushdown
-where key=80 and key=90 and value like '%90%';
-
--- with nothing to push down
-
-explain select * from hbase_pushdown;
-
--- with a predicate which is not actually part of the filter, so
--- it should be ignored by pushdown
-
-explain select * from hbase_pushdown
-where (case when key=90 then 2 else 4 end) > 3;
-
--- with a predicate which is under an OR, so it should
--- be ignored by pushdown
-
-explain select * from hbase_pushdown
-where key=80 or value like '%90%';
-
-set hive.optimize.ppd.storage=false;
-
--- with pushdown disabled
-
-explain select * from hbase_pushdown where key=90;
http://git-wip-us.apache.org/repos/asf/bigtop/blob/e209fdbb/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_pushdown/out
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_pushdown/out b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_pushdown/out
deleted file mode 100644
index c478814..0000000
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_pushdown/out
+++ /dev/null
@@ -1,384 +0,0 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
-CREATE TABLE hbase_pushdown(key int, value string)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string")
-
-
-INSERT OVERWRITE TABLE hbase_pushdown
-SELECT *
-FROM src
-
-
--- with full pushdown
-explain select * from hbase_pushdown where key=90
-ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME hbase_pushdown))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 90))))
-
-STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-0 is a root stage
-
-STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Alias -> Map Operator Tree:
- hbase_pushdown
- TableScan
- alias: hbase_pushdown
- filterExpr:
- expr: (key = 90)
- type: boolean
- Filter Operator
- predicate:
- expr: (key = 90)
- type: boolean
- Select Operator
- expressions:
- expr: key
- type: int
- expr: value
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
- Stage: Stage-0
- Fetch Operator
- limit: -1
-
-
-
-
-select * from hbase_pushdown where key=90
-90 val_90
-
-
--- with partial pushdown
-
-explain select * from hbase_pushdown where key=90 and value like '%90%'
-ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME hbase_pushdown))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (= (TOK_TABLE_OR_COL key) 90) (like (TOK_TABLE_OR_COL value) '%90%')))))
-
-STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-0 is a root stage
-
-STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Alias -> Map Operator Tree:
- hbase_pushdown
- TableScan
- alias: hbase_pushdown
- filterExpr:
- expr: (key = 90)
- type: boolean
- Filter Operator
- predicate:
- expr: (value like '%90%')
- type: boolean
- Filter Operator
- predicate:
- expr: ((key = 90) and (value like '%90%'))
- type: boolean
- Select Operator
- expressions:
- expr: key
- type: int
- expr: value
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
- Stage: Stage-0
- Fetch Operator
- limit: -1
-
-
-
-
-select * from hbase_pushdown where key=90 and value like '%90%'
-90 val_90
-
-
--- with two residuals
-
-explain select * from hbase_pushdown
-where key=90 and value like '%90%' and key=cast(value as int)
-ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME hbase_pushdown))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (and (= (TOK_TABLE_OR_COL key) 90) (like (TOK_TABLE_OR_COL value) '%90%')) (= (TOK_TABLE_OR_COL key) (TOK_FUNCTION TOK_INT (TOK_TABLE_OR_COL value)))))))
-
-STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-0 is a root stage
-
-STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Alias -> Map Operator Tree:
- hbase_pushdown
- TableScan
- alias: hbase_pushdown
- filterExpr:
- expr: (key = 90)
- type: boolean
- Filter Operator
- predicate:
- expr: ((value like '%90%') and (key = UDFToInteger(value)))
- type: boolean
- Filter Operator
- predicate:
- expr: (((key = 90) and (value like '%90%')) and (key = UDFToInteger(value)))
- type: boolean
- Select Operator
- expressions:
- expr: key
- type: int
- expr: value
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
- Stage: Stage-0
- Fetch Operator
- limit: -1
-
-
-
-
--- with contradictory pushdowns
-
-explain select * from hbase_pushdown
-where key=80 and key=90 and value like '%90%'
-ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME hbase_pushdown))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (and (= (TOK_TABLE_OR_COL key) 80) (= (TOK_TABLE_OR_COL key) 90)) (like (TOK_TABLE_OR_COL value) '%90%')))))
-
-STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-0 is a root stage
-
-STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Alias -> Map Operator Tree:
- hbase_pushdown
- TableScan
- alias: hbase_pushdown
- Filter Operator
- predicate:
- expr: (((key = 80) and (key = 90)) and (value like '%90%'))
- type: boolean
- Filter Operator
- predicate:
- expr: (((key = 80) and (key = 90)) and (value like '%90%'))
- type: boolean
- Select Operator
- expressions:
- expr: key
- type: int
- expr: value
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
- Stage: Stage-0
- Fetch Operator
- limit: -1
-
-
-
-
-select * from hbase_pushdown
-where key=80 and key=90 and value like '%90%'
-
-
--- with nothing to push down
-
-explain select * from hbase_pushdown
-ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME hbase_pushdown))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
-
-STAGE DEPENDENCIES:
- Stage-0 is a root stage
-
-STAGE PLANS:
- Stage: Stage-0
- Fetch Operator
- limit: -1
-
-
-
-
--- with a predicate which is not actually part of the filter, so
--- it should be ignored by pushdown
-
-explain select * from hbase_pushdown
-where (case when key=90 then 2 else 4 end) > 3
-ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME hbase_pushdown))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (> (TOK_FUNCTION when (= (TOK_TABLE_OR_COL key) 90) 2 4) 3))))
-
-STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-0 is a root stage
-
-STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Alias -> Map Operator Tree:
- hbase_pushdown
- TableScan
- alias: hbase_pushdown
- Filter Operator
- predicate:
- expr: (CASE WHEN ((key = 90)) THEN (2) ELSE (4) END > 3)
- type: boolean
- Filter Operator
- predicate:
- expr: (CASE WHEN ((key = 90)) THEN (2) ELSE (4) END > 3)
- type: boolean
- Select Operator
- expressions:
- expr: key
- type: int
- expr: value
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
- Stage: Stage-0
- Fetch Operator
- limit: -1
-
-
-
-
--- with a predicate which is under an OR, so it should
--- be ignored by pushdown
-
-explain select * from hbase_pushdown
-where key=80 or value like '%90%'
-ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME hbase_pushdown))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (or (= (TOK_TABLE_OR_COL key) 80) (like (TOK_TABLE_OR_COL value) '%90%')))))
-
-STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-0 is a root stage
-
-STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Alias -> Map Operator Tree:
- hbase_pushdown
- TableScan
- alias: hbase_pushdown
- Filter Operator
- predicate:
- expr: ((key = 80) or (value like '%90%'))
- type: boolean
- Filter Operator
- predicate:
- expr: ((key = 80) or (value like '%90%'))
- type: boolean
- Select Operator
- expressions:
- expr: key
- type: int
- expr: value
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
- Stage: Stage-0
- Fetch Operator
- limit: -1
-
-
-set hive.optimize.ppd.storage=false
-
-
--- with pushdown disabled
-
-explain select * from hbase_pushdown where key=90
-ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME hbase_pushdown))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 90))))
-
-STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-0 is a root stage
-
-STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Alias -> Map Operator Tree:
- hbase_pushdown
- TableScan
- alias: hbase_pushdown
- Filter Operator
- predicate:
- expr: (key = 90)
- type: boolean
- Filter Operator
- predicate:
- expr: (key = 90)
- type: boolean
- Select Operator
- expressions:
- expr: key
- type: int
- expr: value
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
- Stage: Stage-0
- Fetch Operator
- limit: -1
-
-
http://git-wip-us.apache.org/repos/asf/bigtop/blob/e209fdbb/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_queries/filter
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_queries/filter b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_queries/filter
deleted file mode 100644
index 8c5f44f..0000000
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_queries/filter
+++ /dev/null
@@ -1,8 +0,0 @@
-sed -e 's#hdfs://[^/]*/#HDFS_URL/#' \
- -e 's#createTime:[0-9][0-9]*#createTime:NOW#g' \
- -e 's#transient_lastDdlTime=[0-9][0-9]*#transient_lastDdlTime=NOW#g' \
- -e '/Map Operator Tree:/,/Reduce Output Operator/d' \
- -e '/^ (TOK_QUERY/d' \
- -e '/Detailed Table Information/s#owner:[^,]*,#owner:OWNER,#' \
- -e 's#name: default.hbase_table_3#name: HBASE_TABLE#' \
- -e 's#name: hbase_table_3#name: HBASE_TABLE#'
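
Each filter is just a normalizer run over both actual and expected output before the diff; this one scrubs environment-specific noise such as HDFS URLs, create timestamps, and table owners. The same substitutions could be expressed in Groovy, shown here only to make the sed explicit (the range delete and owner rewrite are omitted from this sketch):

    // Mirror of the first three sed substitutions above.
    String normalize(String line) {
        line.replaceAll('hdfs://[^/]*/', 'HDFS_URL/').
             replaceAll(/createTime:[0-9]+/, 'createTime:NOW').
             replaceAll(/transient_lastDdlTime=[0-9]+/, 'transient_lastDdlTime=NOW')
    }

    assert normalize("createTime:1414033685") == "createTime:NOW"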
http://git-wip-us.apache.org/repos/asf/bigtop/blob/e209fdbb/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_queries/in
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_queries/in b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_queries/in
deleted file mode 100644
index 68f1310..0000000
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_queries/in
+++ /dev/null
@@ -1,174 +0,0 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
-DROP TABLE hbase_table_1;
-CREATE TABLE hbase_table_1(key int, value string)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES ("hbase.columns.mapping" = "cf:string")
-TBLPROPERTIES ("hbase.table.name" = "hbase_table_0");
-
-DESCRIBE EXTENDED hbase_table_1;
-
-select * from hbase_table_1;
-
-EXPLAIN FROM src INSERT OVERWRITE TABLE hbase_table_1 SELECT * WHERE (key%2)=0;
-FROM src INSERT OVERWRITE TABLE hbase_table_1 SELECT * WHERE (key%2)=0;
-
-DROP TABLE hbase_table_2;
-CREATE EXTERNAL TABLE hbase_table_2(key int, value string)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES ("hbase.columns.mapping" = "cf:string")
-TBLPROPERTIES ("hbase.table.name" = "hbase_table_0");
-
-EXPLAIN
-SELECT Y.*
-FROM
-(SELECT hbase_table_1.* FROM hbase_table_1) x
-JOIN
-(SELECT src.* FROM src) Y
-ON (x.key = Y.key)
-ORDER BY key, value LIMIT 20;
-
-SELECT Y.*
-FROM
-(SELECT hbase_table_1.* FROM hbase_table_1) x
-JOIN
-(SELECT src.* FROM src) Y
-ON (x.key = Y.key)
-ORDER BY key, value LIMIT 20;
-
-EXPLAIN
-SELECT Y.*
-FROM
-(SELECT hbase_table_1.* FROM hbase_table_1 WHERE hbase_table_1.key > 100) x
-JOIN
-(SELECT hbase_table_2.* FROM hbase_table_2 WHERE hbase_table_2.key < 120) Y
-ON (x.key = Y.key)
-ORDER BY key, value;
-
-SELECT Y.*
-FROM
-(SELECT hbase_table_1.* FROM hbase_table_1 WHERE hbase_table_1.key > 100) x
-JOIN
-(SELECT hbase_table_2.* FROM hbase_table_2 WHERE hbase_table_2.key < 120) Y
-ON (x.key = Y.key)
-ORDER BY key,value;
-
-DROP TABLE empty_hbase_table;
-CREATE TABLE empty_hbase_table(key int, value string)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES ("hbase.columns.mapping" = "cf:string");
-
-DROP TABLE empty_normal_table;
-CREATE TABLE empty_normal_table(key int, value string);
-
-select * from (select count(1) as c from empty_normal_table union all select count(1) as c from empty_hbase_table) x order by c;
-select * from (select count(1) c from empty_normal_table union all select count(1) as c from hbase_table_1) x order by c;
-select * from (select count(1) c from src union all select count(1) as c from empty_hbase_table) x order by c;
-select * from (select count(1) c from src union all select count(1) as c from hbase_table_1) x order by c;
-
-CREATE TABLE hbase_table_3(key int, value string, count int)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES (
-"hbase.columns.mapping" = "cf:val,cf2:count"
-);
-
-EXPLAIN
-INSERT OVERWRITE TABLE hbase_table_3
-SELECT x.key, x.value, Y.count
-FROM
-(SELECT hbase_table_1.* FROM hbase_table_1) x
-JOIN
-(SELECT src.key, count(src.key) as count FROM src GROUP BY src.key) Y
-ON (x.key = Y.key);
-
-INSERT OVERWRITE TABLE hbase_table_3
-SELECT x.key, x.value, Y.count
-FROM
-(SELECT hbase_table_1.* FROM hbase_table_1) x
-JOIN
-(SELECT src.key, count(src.key) as count FROM src GROUP BY src.key) Y
-ON (x.key = Y.key);
-
-select count(1) from hbase_table_3;
-select * from hbase_table_3 order by key, value limit 5;
-select key, count from hbase_table_3 order by key, count desc limit 5;
-
-DROP TABLE hbase_table_4;
-CREATE TABLE hbase_table_4(key int, value1 string, value2 int, value3 int)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES (
-"hbase.columns.mapping" = "a:b,a:c,d:e"
-);
-
-INSERT OVERWRITE TABLE hbase_table_4 SELECT key, value, key+1, key+2
-FROM src WHERE key=98 OR key=100;
-
-SELECT * FROM hbase_table_4 ORDER BY key;
-
-DROP TABLE hbase_table_5;
-CREATE EXTERNAL TABLE hbase_table_5(key int, value map<string,string>)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES ("hbase.columns.mapping" = "a:")
-TBLPROPERTIES ("hbase.table.name" = "hbase_table_4");
-
-SELECT * FROM hbase_table_5 ORDER BY key;
-
-DROP TABLE hbase_table_6;
-CREATE TABLE hbase_table_6(key int, value map<string,string>)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES (
-"hbase.columns.mapping" = ":key,cf:"
-);
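--- Here the row key is mapped explicitly via ":key", and "cf:" again binds a
--- whole column family to a map column.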
-INSERT OVERWRITE TABLE hbase_table_6 SELECT key, map(value, key) FROM src
-WHERE key=98 OR key=100;
-
-SELECT * FROM hbase_table_6 ORDER BY key;
-
-DROP TABLE hbase_table_7;
-CREATE TABLE hbase_table_7(value map<string,string>, key int)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES (
-"hbase.columns.mapping" = "cf:,:key"
-);
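--- Mapping entries follow the declared column order, so ":key" need not come
--- first: here the map column precedes the row key.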
-INSERT OVERWRITE TABLE hbase_table_7
-SELECT map(value, key, upper(value), key+1), key FROM src
-WHERE key=98 OR key=100;
-
-SELECT * FROM hbase_table_7 ORDER BY key;
-
-set hive.hbase.wal.enabled=false;
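--- Disabling the HBase write-ahead log trades durability for insert speed;
--- hbase_table_8 repeats the hbase_table_4 scenario, presumably to verify
--- that the results are unchanged with the WAL off.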
-
-DROP TABLE hbase_table_8;
-CREATE TABLE hbase_table_8(key int, value1 string, value2 int, value3 int)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES (
-"hbase.columns.mapping" = "a:b,a:c,d:e"
-);
-
-INSERT OVERWRITE TABLE hbase_table_8 SELECT key, value, key+1, key+2
-FROM src WHERE key=98 OR key=100;
-
-SELECT * FROM hbase_table_8 ORDER BY key;
-
-DROP TABLE hbase_table_1;
-DROP TABLE hbase_table_2;
-DROP TABLE hbase_table_3;
-DROP TABLE hbase_table_4;
-DROP TABLE hbase_table_5;
-DROP TABLE hbase_table_6;
-DROP TABLE hbase_table_7;
-DROP TABLE hbase_table_8;
-DROP TABLE empty_hbase_table;
-DROP TABLE empty_normal_table;
http://git-wip-us.apache.org/repos/asf/bigtop/blob/e209fdbb/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_queries/out
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_queries/out b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_queries/out
deleted file mode 100644
index 0b2b6fe..0000000
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_queries/out
+++ /dev/null
@@ -1,701 +0,0 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
-DROP TABLE hbase_table_1
-
-CREATE TABLE hbase_table_1(key int, value string)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES ("hbase.columns.mapping" = "cf:string")
-TBLPROPERTIES ("hbase.table.name" = "hbase_table_0")
-
-
-DESCRIBE EXTENDED hbase_table_1
-key int from deserializer
-value string from deserializer
-
-Detailed Table Information Table(tableName:hbase_table_1, dbName:default, owner:testuser1@MINOTAUR.CLOUDERA.COM, createTime:1301900428, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null), FieldSchema(name:value, type:string, comment:null)], location:hdfs://minotaur01.sf.cloudera.com:17020/user/hive/warehouse/hbase_table_1, inputFormat:org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat, outputFormat:org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.hbase.HBaseSerDe, parameters:{serialization.format=1, hbase.columns.mapping=cf:string}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{hbase.table.name=hbase_table_0, transient_lastDdlTime=1301900428, storage_handler=org.apache.hadoop.hive.hbase.HBaseStorageHandler}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
-
-
-select * from hbase_table_1
-
-
-EXPLAIN FROM src INSERT OVERWRITE TABLE hbase_table_1 SELECT * WHERE (key%2)=0
-ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME hbase_table_1))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (% (TOK_TABLE_OR_COL key) 2) 0))))
-
-STAGE DEPENDENCIES:
- Stage-0 is a root stage
-
-STAGE PLANS:
- Stage: Stage-0
- Map Reduce
- Alias -> Map Operator Tree:
- src
- TableScan
- alias: src
- Filter Operator
- predicate:
- expr: ((key % 2) = 0)
- type: boolean
- Filter Operator
- predicate:
- expr: ((key % 2) = 0)
- type: boolean
- Select Operator
- expressions:
- expr: key
- type: string
- expr: value
- type: string
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: UDFToInteger(_col0)
- type: int
- expr: _col1
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat
- output format: org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat
- serde: org.apache.hadoop.hive.hbase.HBaseSerDe
- name: default.hbase_table_1
-
-
-
-FROM src INSERT OVERWRITE TABLE hbase_table_1 SELECT * WHERE (key%2)=0
-
-
-DROP TABLE hbase_table_2
-
-CREATE EXTERNAL TABLE hbase_table_2(key int, value string)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES ("hbase.columns.mapping" = "cf:string")
-TBLPROPERTIES ("hbase.table.name" = "hbase_table_0")
-
-
-EXPLAIN
-SELECT Y.*
-FROM
-(SELECT hbase_table_1.* FROM hbase_table_1) x
-JOIN
-(SELECT src.* FROM src) Y
-ON (x.key = Y.key)
-ORDER BY key, value LIMIT 20
-ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME hbase_table_1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME hbase_table_1)))))) x) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME src)))))) Y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL Y) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME Y)))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))) (TOK_LIMIT 20)))
-
-STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-2 depends on stages: Stage-1
- Stage-0 is a root stage
-
-STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Alias -> Map Operator Tree:
- x:hbase_table_1
- TableScan
- alias: hbase_table_1
- Select Operator
- expressions:
- expr: key
- type: int
- outputColumnNames: _col0
- Reduce Output Operator
- key expressions:
- expr: UDFToDouble(_col0)
- type: double
- sort order: +
- Map-reduce partition columns:
- expr: UDFToDouble(_col0)
- type: double
- tag: 0
- y:src
- TableScan
- alias: src
- Select Operator
- expressions:
- expr: key
- type: string
- expr: value
- type: string
- outputColumnNames: _col0, _col1
- Reduce Output Operator
- key expressions:
- expr: UDFToDouble(_col0)
- type: double
- sort order: +
- Map-reduce partition columns:
- expr: UDFToDouble(_col0)
- type: double
- tag: 1
- value expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- Reduce Operator Tree:
- Join Operator
- condition map:
- Inner Join 0 to 1
- condition expressions:
- 0
- 1 {VALUE._col0} {VALUE._col1}
- handleSkewJoin: false
- outputColumnNames: _col2, _col3
- Select Operator
- expressions:
- expr: _col2
- type: string
- expr: _col3
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
- Stage: Stage-2
- Map Reduce
- Alias -> Map Operator Tree:
- hdfs://minotaur01.sf.cloudera.com:17020/tmp/hive-testuser1/hive_2011-04-04_00-00-37_447_311787050586995300/-mr-10002
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- sort order: ++
- tag: -1
- value expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- Reduce Operator Tree:
- Extract
- Limit
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
- Stage: Stage-0
- Fetch Operator
- limit: 20
-
-
-
-
-SELECT Y.*
-FROM
-(SELECT hbase_table_1.* FROM hbase_table_1) x
-JOIN
-(SELECT src.* FROM src) Y
-ON (x.key = Y.key)
-ORDER BY key, value LIMIT 20
-0 val_0
-0 val_0
-0 val_0
-10 val_10
-100 val_100
-100 val_100
-104 val_104
-104 val_104
-114 val_114
-116 val_116
-118 val_118
-118 val_118
-12 val_12
-12 val_12
-120 val_120
-120 val_120
-126 val_126
-128 val_128
-128 val_128
-128 val_128
-
-
-EXPLAIN
-SELECT Y.*
-FROM
-(SELECT hbase_table_1.* FROM hbase_table_1 WHERE hbase_table_1.key > 100) x
-JOIN
-(SELECT hbase_table_2.* FROM hbase_table_2 WHERE hbase_table_2.key < 120) Y
-ON (x.key = Y.key)
-ORDER BY key, value
-ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME hbase_table_1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME hbase_table_1)))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL hbase_table_1) key) 100)))) x) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME hbase_table_2))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME hbase_table_2)))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL hbase_table_2) key) 120)))) Y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL Y) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME Y)))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value)))))
-
-STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-2 depends on stages: Stage-1
- Stage-0 is a root stage
-
-STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Alias -> Map Operator Tree:
- x:hbase_table_1
- TableScan
- alias: hbase_table_1
- Filter Operator
- predicate:
- expr: (key > 100)
- type: boolean
- Filter Operator
- predicate:
- expr: (key > 100)
- type: boolean
- Select Operator
- expressions:
- expr: key
- type: int
- outputColumnNames: _col0
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: int
- sort order: +
- Map-reduce partition columns:
- expr: _col0
- type: int
- tag: 0
- y:hbase_table_2
- TableScan
- alias: hbase_table_2
- Filter Operator
- predicate:
- expr: (key < 120)
- type: boolean
- Filter Operator
- predicate:
- expr: (key < 120)
- type: boolean
- Select Operator
- expressions:
- expr: key
- type: int
- expr: value
- type: string
- outputColumnNames: _col0, _col1
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: int
- sort order: +
- Map-reduce partition columns:
- expr: _col0
- type: int
- tag: 1
- value expressions:
- expr: _col0
- type: int
- expr: _col1
- type: string
- Reduce Operator Tree:
- Join Operator
- condition map:
- Inner Join 0 to 1
- condition expressions:
- 0
- 1 {VALUE._col0} {VALUE._col1}
- handleSkewJoin: false
- outputColumnNames: _col2, _col3
- Select Operator
- expressions:
- expr: _col2
- type: int
- expr: _col3
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
- Stage: Stage-2
- Map Reduce
- Alias -> Map Operator Tree:
- hdfs://minotaur01.sf.cloudera.com:17020/tmp/hive-testuser1/hive_2011-04-04_00-01-03_920_5397268077686778739/-mr-10002
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: int
- expr: _col1
- type: string
- sort order: ++
- tag: -1
- value expressions:
- expr: _col0
- type: int
- expr: _col1
- type: string
- Reduce Operator Tree:
- Extract
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
- Stage: Stage-0
- Fetch Operator
- limit: -1
-
-
-
-
-SELECT Y.*
-FROM
-(SELECT hbase_table_1.* FROM hbase_table_1 WHERE hbase_table_1.key > 100) x
-JOIN
-(SELECT hbase_table_2.* FROM hbase_table_2 WHERE hbase_table_2.key < 120) Y
-ON (x.key = Y.key)
-ORDER BY key, value
-104 val_104
-114 val_114
-116 val_116
-118 val_118
-
-
-DROP TABLE empty_hbase_table
-
-CREATE TABLE empty_hbase_table(key int, value string)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES ("hbase.columns.mapping" = "cf:string")
-
-
-DROP TABLE empty_normal_table
-
-CREATE TABLE empty_normal_table(key int, value string)
-
-
-select * from (select count(1) as c from empty_normal_table union all select count(1) as c from empty_hbase_table) x order by c
-0
-0
-
-select * from (select count(1) c from empty_normal_table union all select count(1) as c from hbase_table_1) x order by c
-0
-155
-
-select * from (select count(1) c from src union all select count(1) as c from empty_hbase_table) x order by c
-0
-500
-
-select * from (select count(1) c from src union all select count(1) as c from hbase_table_1) x order by c
-155
-500
-
-
-CREATE TABLE hbase_table_3(key int, value string, count int)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES (
-"hbase.columns.mapping" = "cf:val,cf2:count"
-)
-
-
-EXPLAIN
-INSERT OVERWRITE TABLE hbase_table_3
-SELECT x.key, x.value, Y.count
-FROM
-(SELECT hbase_table_1.* FROM hbase_table_1) x
-JOIN
-(SELECT src.key, count(src.key) as count FROM src GROUP BY src.key) Y
-ON (x.key = Y.key)
-ABSTRACT SYNTAX TREE:
- (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME hbase_table_1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME hbase_table_1)))))) x) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (TOK_FUNCTION count (. (TOK_TABLE_OR_COL src) key)) count)) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src) key)))) Y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL Y) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME hbase_table_3))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL Y) count)))))
-
-STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Alias -> Map Operator Tree:
- y:src
- TableScan
- alias: src
- Select Operator
- expressions:
- expr: key
- type: string
- outputColumnNames: key
- Group By Operator
- aggregations:
- expr: count(key)
- bucketGroup: false
- keys:
- expr: key
- type: string
- mode: hash
- outputColumnNames: _col0, _col1
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: _col0
- type: string
- tag: -1
- value expressions:
- expr: _col1
- type: bigint
- Reduce Operator Tree:
- Group By Operator
- aggregations:
- expr: count(VALUE._col0)
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- mode: mergepartial
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: bigint
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
- Stage: Stage-0
- Map Reduce
- Alias -> Map Operator Tree:
- $INTNAME
- Reduce Output Operator
- key expressions:
- expr: UDFToDouble(_col0)
- type: double
- sort order: +
- Map-reduce partition columns:
- expr: UDFToDouble(_col0)
- type: double
- tag: 1
- value expressions:
- expr: _col1
- type: bigint
- x:hbase_table_1
- TableScan
- alias: hbase_table_1
- Select Operator
- expressions:
- expr: key
- type: int
- expr: value
- type: string
- outputColumnNames: _col0, _col1
- Reduce Output Operator
- key expressions:
- expr: UDFToDouble(_col0)
- type: double
- sort order: +
- Map-reduce partition columns:
- expr: UDFToDouble(_col0)
- type: double
- tag: 0
- value expressions:
- expr: _col0
- type: int
- expr: _col1
- type: string
- Reduce Operator Tree:
- Join Operator
- condition map:
- Inner Join 0 to 1
- condition expressions:
- 0 {VALUE._col0} {VALUE._col1}
- 1 {VALUE._col1}
- handleSkewJoin: false
- outputColumnNames: _col0, _col1, _col3
- Select Operator
- expressions:
- expr: _col0
- type: int
- expr: _col1
- type: string
- expr: _col3
- type: bigint
- outputColumnNames: _col0, _col1, _col2
- Select Operator
- expressions:
- expr: _col0
- type: int
- expr: _col1
- type: string
- expr: UDFToInteger(_col2)
- type: int
- outputColumnNames: _col0, _col1, _col2
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat
- output format: org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat
- serde: org.apache.hadoop.hive.hbase.HBaseSerDe
- name: default.hbase_table_3
-
-
-
-
-INSERT OVERWRITE TABLE hbase_table_3
-SELECT x.key, x.value, Y.count
-FROM
-(SELECT hbase_table_1.* FROM hbase_table_1) x
-JOIN
-(SELECT src.key, count(src.key) as count FROM src GROUP BY src.key) Y
-ON (x.key = Y.key)
-
-
-select count(1) from hbase_table_3
-155
-
-select * from hbase_table_3 order by key, value limit 5
-0 val_0 3
-2 val_2 1
-4 val_4 1
-8 val_8 1
-10 val_10 1
-
-select key, count from hbase_table_3 order by key, count desc limit 5
-0 3
-2 1
-4 1
-8 1
-10 1
-
-
-DROP TABLE hbase_table_4
-
-CREATE TABLE hbase_table_4(key int, value1 string, value2 int, value3 int)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES (
-"hbase.columns.mapping" = "a:b,a:c,d:e"
-)
-
-
-INSERT OVERWRITE TABLE hbase_table_4 SELECT key, value, key+1, key+2
-FROM src WHERE key=98 OR key=100
-
-
-SELECT * FROM hbase_table_4 ORDER BY key
-98 val_98 99 100
-100 val_100 101 102
-
-
-DROP TABLE hbase_table_5
-
-CREATE EXTERNAL TABLE hbase_table_5(key int, value map<string,string>)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES ("hbase.columns.mapping" = "a:")
-TBLPROPERTIES ("hbase.table.name" = "hbase_table_4")
-
-
-SELECT * FROM hbase_table_5 ORDER BY key
-98 {"b":"val_98","c":"99"}
-100 {"b":"val_100","c":"101"}
-
-
-DROP TABLE hbase_table_6
-
-CREATE TABLE hbase_table_6(key int, value map<string,string>)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES (
-"hbase.columns.mapping" = ":key,cf:"
-)
-
-INSERT OVERWRITE TABLE hbase_table_6 SELECT key, map(value, key) FROM src
-WHERE key=98 OR key=100
-
-
-SELECT * FROM hbase_table_6 ORDER BY key
-98 {"val_98":"98"}
-100 {"val_100":"100"}
-
-
-DROP TABLE hbase_table_7
-
-CREATE TABLE hbase_table_7(value map<string,string>, key int)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES (
-"hbase.columns.mapping" = "cf:,:key"
-)
-
-INSERT OVERWRITE TABLE hbase_table_7
-SELECT map(value, key, upper(value), key+1), key FROM src
-WHERE key=98 OR key=100
-
-
-SELECT * FROM hbase_table_7 ORDER BY key
-{"VAL_98":"99.0","val_98":"98"} 98
-{"VAL_100":"101.0","val_100":"100"} 100
-set hive.hbase.wal.enabled=false
-
-
-DROP TABLE hbase_table_8
-
-CREATE TABLE hbase_table_8(key int, value1 string, value2 int, value3 int)
-STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-WITH SERDEPROPERTIES (
-"hbase.columns.mapping" = "a:b,a:c,d:e"
-)
-
-
-INSERT OVERWRITE TABLE hbase_table_8 SELECT key, value, key+1, key+2
-FROM src WHERE key=98 OR key=100
-
-
-SELECT * FROM hbase_table_8 ORDER BY key
-98 val_98 99 100
-100 val_100 101 102
-
-
-DROP TABLE hbase_table_1
-
-DROP TABLE hbase_table_2
-
-DROP TABLE hbase_table_3
-
-DROP TABLE hbase_table_4
-
-DROP TABLE hbase_table_5
-
-DROP TABLE hbase_table_6
-
-DROP TABLE hbase_table_7
-
-DROP TABLE hbase_table_8
-
-DROP TABLE empty_hbase_table
-
-DROP TABLE empty_normal_table
http://git-wip-us.apache.org/repos/asf/bigtop/blob/e209fdbb/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_stats/filter
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_stats/filter b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_stats/filter
deleted file mode 100644
index 3844c2e..0000000
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/integration/hbase_stats/filter
+++ /dev/null
@@ -1,4 +0,0 @@
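-# Normalizes cluster-specific output (HDFS URIs, create/DDL times, owner)
-# so actual results can be diffed against the recorded "out" file.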
-sed -e 's#hdfs://[^/]*/#HDFS_URL/#' \
- -e '/^CreateTime:/d' \
- -e '/transient_lastDdlTime/d' \
- -e 's#^Owner:.*$#Owner: USER#'