You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by se...@apache.org on 2018/06/21 17:53:17 UTC
[07/12] hive git commit: HIVE-19882: Fix QTestUtil session lifecycle
(Zoltan Haindrich reviewed by Jason Dere)
HIVE-19882: Fix QTestUtil session lifecycle (Zoltan Haindrich reviewed by Jason Dere)
Signed-off-by: Zoltan Haindrich <ki...@rxd.hu>
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e6577a0d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e6577a0d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e6577a0d
Branch: refs/heads/master-txnstats
Commit: e6577a0d910ce7fc982f497848229e0195859dc1
Parents: aea5908
Author: Zoltan Haindrich <ki...@rxd.hu>
Authored: Thu Jun 21 06:15:49 2018 +0200
Committer: Zoltan Haindrich <ki...@rxd.hu>
Committed: Thu Jun 21 06:15:49 2018 +0200
----------------------------------------------------------------------
data/scripts/q_test_init.sql | 52 ----
data/scripts/q_test_init_parse.sql | 10 +
.../hadoop/hive/ql/TestLocationQueries.java | 3 +-
.../apache/hadoop/hive/ql/TestMTQueries.java | 1 +
.../test/resources/testconfiguration.properties | 1 -
.../hadoop/hive/accumulo/AccumuloQTestUtil.java | 1 +
.../control/AbstractCoreBlobstoreCliDriver.java | 6 +-
.../hadoop/hive/cli/control/CliConfigs.java | 2 +-
.../hive/cli/control/CoreAccumuloCliDriver.java | 32 ++-
.../hadoop/hive/cli/control/CoreCliDriver.java | 8 +-
.../hive/cli/control/CoreCompareCliDriver.java | 9 +-
.../hive/cli/control/CoreHBaseCliDriver.java | 33 +--
.../cli/control/CoreHBaseNegativeCliDriver.java | 36 ++-
.../hive/cli/control/CoreNegativeCliDriver.java | 6 +-
.../hive/cli/control/CorePerfCliDriver.java | 5 +-
.../org/apache/hadoop/hive/ql/QTestUtil.java | 262 ++++++-------------
.../hadoop/hive/ql/parse/CoreParseNegative.java | 3 +-
.../clientpositive/druidkafkamini_basic.q | 2 +-
.../queries/positive/input_testsequencefile.q | 11 +
19 files changed, 192 insertions(+), 291 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/data/scripts/q_test_init.sql
----------------------------------------------------------------------
diff --git a/data/scripts/q_test_init.sql b/data/scripts/q_test_init.sql
index a269c55..df05828 100644
--- a/data/scripts/q_test_init.sql
+++ b/data/scripts/q_test_init.sql
@@ -6,57 +6,5 @@ set hive.stats.dbclass=fs;
DROP FUNCTION IF EXISTS qtest_get_java_boolean;
CREATE FUNCTION qtest_get_java_boolean AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestGetJavaBoolean';
---
--- Table dest1
---
-DROP TABLE IF EXISTS dest1;
-
-CREATE TABLE dest1 (key STRING COMMENT 'default', value STRING COMMENT 'default')
-STORED AS
-INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
-OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat';
-
---
--- Table dest2
---
-DROP TABLE IF EXISTS dest2;
-
-CREATE TABLE dest2 (key STRING COMMENT 'default', value STRING COMMENT 'default')
-STORED AS
-INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
-OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat';
-
---
--- Table dest3
---
-DROP TABLE IF EXISTS dest3;
-
-CREATE TABLE dest3 (key STRING COMMENT 'default', value STRING COMMENT 'default')
-PARTITIONED BY (ds STRING, hr STRING)
-STORED AS
-INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
-OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat';
-ALTER TABLE dest3 ADD PARTITION (ds='2008-04-08',hr='12');
-
---
--- Table dest4
---
-DROP TABLE IF EXISTS dest4;
-
-CREATE TABLE dest4 (key STRING COMMENT 'default', value STRING COMMENT 'default')
-STORED AS
-INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
-OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat';
-
---
--- Table dest4_sequencefile
---
-DROP TABLE IF EXISTS dest4_sequencefile;
-
-CREATE TABLE dest4_sequencefile (key STRING COMMENT 'default', value STRING COMMENT 'default')
-STORED AS
-INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
-OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat';
-
reset;
set hive.stats.dbclass=fs;
http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/data/scripts/q_test_init_parse.sql
----------------------------------------------------------------------
diff --git a/data/scripts/q_test_init_parse.sql b/data/scripts/q_test_init_parse.sql
new file mode 100644
index 0000000..f84c847
--- /dev/null
+++ b/data/scripts/q_test_init_parse.sql
@@ -0,0 +1,10 @@
+--
+-- Table dest1
+--
+DROP TABLE IF EXISTS dest1;
+
+CREATE TABLE dest1 (key STRING COMMENT 'default', value STRING COMMENT 'default')
+STORED AS
+INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
+OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat';
+
http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java
index bd33325..31195c4 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java
@@ -109,7 +109,8 @@ public class TestLocationQueries extends BaseTestQueries {
for (int i = 0; i < qfiles.length; i++) {
qt[i] = new CheckResults(resDir, logDir, MiniClusterType.none, "0.20", "parta");
- qt[i].addFile(qfiles[i]);
+ qt[i].newSession();
+ qt[i].addFile(qfiles[i], false);
qt[i].clearTestSideEffects();
}
http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMTQueries.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMTQueries.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMTQueries.java
index 6ed872d..3d8eb83 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMTQueries.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMTQueries.java
@@ -46,6 +46,7 @@ public class TestMTQueries extends BaseTestQueries {
util.getConf().set("hive.stats.dbclass", "fs");
util.getConf().set("hive.mapred.mode", "nonstrict");
util.getConf().set("hive.stats.column.autogather", "false");
+ util.newSession(true);
}
boolean success = QTestUtil.queryListRunnerMultiThreaded(qfiles, qts);
if (!success) {
http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index aeb6211..a3ddbda 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -17,7 +17,6 @@ disabled.query.files=ql_rewrite_gbtoidx.q,\
union_stats.q,\
sample2.q,\
sample4.q,\
- sample6.q,\
root_dir_external_table.q,\
input31.q
http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java
index 0f9528f..956478d 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/accumulo/AccumuloQTestUtil.java
@@ -32,6 +32,7 @@ public class AccumuloQTestUtil extends QTestUtil {
super(outDir, logDir, miniMr, null, "0.20", initScript, cleanupScript, false);
setup.setupWithHiveConf(conf);
this.setup = setup;
+ this.savedConf = new HiveConf(conf);
}
@Override
http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java
index 764a4d8..3cf5ebb 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java
@@ -68,6 +68,7 @@ public abstract class AbstractCoreBlobstoreCliDriver extends CliAdapter {
// do a one time initialization
setupUniqueTestPath();
+ qt.newSession();
qt.cleanUp();
qt.createSources();
} catch (Exception e) {
@@ -82,7 +83,7 @@ public abstract class AbstractCoreBlobstoreCliDriver extends CliAdapter {
@Before
public void setUp() {
try {
- qt.clearTestSideEffects();
+ qt.newSession();
} catch (Exception e) {
System.err.println("Exception: " + e.getMessage());
e.printStackTrace();
@@ -95,6 +96,7 @@ public abstract class AbstractCoreBlobstoreCliDriver extends CliAdapter {
@After
public void tearDown() {
try {
+ qt.clearTestSideEffects();
qt.clearPostTestEffects();
} catch (Exception e) {
System.err.println("Exception: " + e.getMessage());
@@ -131,7 +133,7 @@ public abstract class AbstractCoreBlobstoreCliDriver extends CliAdapter {
System.err.println("Begin query: " + fname);
qt.addFile(fpath);
- qt.cliInit(new File(fpath), false);
+ qt.cliInit(new File(fpath));
int ecode = qt.executeClient(fname);
if ((ecode == 0) ^ expectSuccess) {
qt.failed(ecode, fname, debugHint);
http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
index 59a78d9..d07599e 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
@@ -84,7 +84,7 @@ public class CliConfigs {
setResultsDir("ql/src/test/results/compiler/errors");
setLogDir("itests/qtest/target/qfile-results/negative");
- setInitScript("q_test_init.sql");
+ setInitScript("q_test_init_parse.sql");
setCleanupScript("q_test_cleanup.sql");
setHiveConfDir("data/conf/perf-reg/");
http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java
index 648a05d..0d64cfa 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.cli.control;
import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
import java.io.File;
@@ -43,7 +44,7 @@ public class CoreAccumuloCliDriver extends CliAdapter {
@BeforeClass
public void beforeClass() {
setup = new AccumuloTestSetup();
-
+
MiniClusterType miniMR = cliConfig.getClusterType();
String initScript = cliConfig.getInitScript();
String cleanupScript = cliConfig.getCleanupScript();
@@ -51,20 +52,21 @@ public class CoreAccumuloCliDriver extends CliAdapter {
try {
qt = new AccumuloQTestUtil(cliConfig.getResultsDir(), cliConfig.getLogDir(), miniMR,
setup, initScript, cleanupScript);
-
+
// do a one time initialization
+ qt.newSession();
qt.cleanUp();
qt.createSources();
} catch (Exception e) {
throw new RuntimeException("Unexpected exception in setUp",e);
}
}
-
+
@Override
@AfterClass
public void shutdown() throws Exception {
setup.tearDown();
-
+
try {
qt.shutdown();
}
@@ -72,16 +74,33 @@ public class CoreAccumuloCliDriver extends CliAdapter {
throw new RuntimeException("Unexpected exception in tearDown",e);
}
}
+
@Override
@Before
public void setUp() {
+ try {
+ qt.newSession();
+ } catch (Exception e) {
+ System.err.println("Exception: " + e.getMessage());
+ e.printStackTrace();
+ System.err.flush();
+ fail("Unexpected exception in setup");
+ }
}
@Override
@After
public void tearDown() {
+ try {
+ qt.clearPostTestEffects();
+ qt.clearTestSideEffects();
+ } catch (Exception e) {
+ System.err.println("Exception: " + e.getMessage());
+ e.printStackTrace();
+ System.err.flush();
+ fail("Unexpected exception in tearDown");
+ }
}
-
@Override
public void runTest(String tname, String fname, String fpath) throws Exception {
long startTime = System.currentTimeMillis();
@@ -90,8 +109,7 @@ public class CoreAccumuloCliDriver extends CliAdapter {
qt.addFile(fpath);
- qt.cliInit(new File(fpath), false);
- qt.clearTestSideEffects();
+ qt.cliInit(new File(fpath));
int ecode = qt.executeClient(fname);
if (ecode != 0) {
qt.failed(ecode, fname, null);
http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
index e588592..1ead144 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
@@ -72,7 +72,8 @@ public class CoreCliDriver extends CliAdapter {
new ElapsedTimeLoggingWrapper<Void>() {
@Override
public Void invokeInternal() throws Exception {
- qt.cleanUp();
+ qt.newSession();
+ qt.cleanUp(); // I don't think this is necessary...
return null;
}
}.invoke("Initialization cleanup done.", LOG, true);
@@ -100,7 +101,7 @@ public class CoreCliDriver extends CliAdapter {
new ElapsedTimeLoggingWrapper<Void>() {
@Override
public Void invokeInternal() throws Exception {
- qt.clearTestSideEffects();
+ qt.newSession();
return null;
}
}.invoke("PerTestSetup done.", LOG, false);
@@ -120,6 +121,7 @@ public class CoreCliDriver extends CliAdapter {
@Override
public Void invokeInternal() throws Exception {
qt.clearPostTestEffects();
+ qt.clearTestSideEffects();
return null;
}
}.invoke("PerTestTearDown done.", LOG, false);
@@ -165,7 +167,7 @@ public class CoreCliDriver extends CliAdapter {
System.err.println("Begin query: " + fname);
qt.addFile(fpath);
- qt.cliInit(new File(fpath), false);
+ qt.cliInit(new File(fpath));
int ecode = qt.executeClient(fname);
if (ecode != 0) {
failed = true;
http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
index 1ad76f9..6b4c6c6 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
@@ -56,6 +56,7 @@ public class CoreCompareCliDriver extends CliAdapter{
hiveConfDir, hadoopVer, initScript, cleanupScript, false);
// do a one time initialization
+ qt.newSession();
qt.cleanUp();
qt.createSources();
@@ -130,14 +131,14 @@ public class CoreCompareCliDriver extends CliAdapter{
}
int ecode = 0;
-
- qt.cliInit(new File(fpath), false);
-
+
+ qt.cliInit(new File(fpath));
+
List<String> outputs = new ArrayList<>(versionFiles.size());
for (String versionFile : versionFiles) {
// 1 for "_" after tname; 3 for ".qv" at the end. Version is in between.
String versionStr = versionFile.substring(tname.length() + 1, versionFile.length() - 3);
- outputs.add(qt.cliInit(new File(queryDirectory, tname + "." + versionStr), false));
+ outputs.add(qt.cliInit(new File(queryDirectory, tname + "." + versionStr)));
// TODO: will this work?
ecode = qt.executeClient(versionFile, fname);
if (ecode != 0) {
http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java
index fc5f75d..70cbf04 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java
@@ -43,22 +43,23 @@ public class CoreHBaseCliDriver extends CliAdapter {
@Override
@BeforeClass
public void beforeClass() {
- MiniClusterType miniMR = cliConfig.getClusterType();
- String initScript = cliConfig.getInitScript();
- String cleanupScript =cliConfig.getCleanupScript();
+ MiniClusterType miniMR = cliConfig.getClusterType();
+ String initScript = cliConfig.getInitScript();
+ String cleanupScript = cliConfig.getCleanupScript();
- try {
- qt = new HBaseQTestUtil(cliConfig.getResultsDir(), cliConfig.getLogDir(), miniMR,
+ try {
+ qt = new HBaseQTestUtil(cliConfig.getResultsDir(), cliConfig.getLogDir(), miniMR,
setup, initScript, cleanupScript);
- qt.cleanUp(null);
- qt.createSources(null);
+ qt.newSession();
+ qt.cleanUp(null);
+ qt.createSources(null);
- } catch (Exception e) {
- System.err.println("Exception: " + e.getMessage());
- e.printStackTrace();
- System.err.flush();
- fail("Unexpected exception in static initialization: "+e.getMessage());
- }
+ } catch (Exception e) {
+ System.err.println("Exception: " + e.getMessage());
+ e.printStackTrace();
+ System.err.flush();
+ throw new RuntimeException(e);
+ }
}
@@ -66,7 +67,7 @@ public class CoreHBaseCliDriver extends CliAdapter {
@Before
public void setUp() {
try {
- qt.clearTestSideEffects();
+ qt.newSession();
} catch (Exception e) {
System.err.println("Exception: " + e.getMessage());
e.printStackTrace();
@@ -79,6 +80,7 @@ public class CoreHBaseCliDriver extends CliAdapter {
public void tearDown() {
try {
qt.clearPostTestEffects();
+ qt.clearTestSideEffects();
} catch (Exception e) {
System.err.println("Exception: " + e.getMessage());
e.printStackTrace();
@@ -91,7 +93,6 @@ public class CoreHBaseCliDriver extends CliAdapter {
@AfterClass
public void shutdown() throws Exception {
try {
- // FIXME: there were 2 afterclass methods...i guess this is the right order...maybe not
qt.shutdown();
setup.tearDown();
} catch (Exception e) {
@@ -110,7 +111,7 @@ public class CoreHBaseCliDriver extends CliAdapter {
qt.addFile(fpath);
- qt.cliInit(new File(fpath), false);
+ qt.cliInit(new File(fpath));
int ecode = qt.executeClient(fname);
if (ecode != 0) {
http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java
index 8fb88d0..c76a70e 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java
@@ -42,13 +42,6 @@ public class CoreHBaseNegativeCliDriver extends CliAdapter {
@Override
public void beforeClass() throws Exception {
- }
-
- // hmm..this looks a bit weird...setup boots qtestutil...this part used to be in beforeclass
- @Override
- @Before
- public void setUp() {
-
MiniClusterType miniMR = cliConfig.getClusterType();
String initScript = cliConfig.getInitScript();
String cleanupScript = cliConfig.getCleanupScript();
@@ -64,11 +57,26 @@ public class CoreHBaseNegativeCliDriver extends CliAdapter {
}
}
+ // hmm..this looks a bit weird...setup boots qtestutil...this part used to be in beforeclass
+ @Override
+ @Before
+ public void setUp() {
+ try {
+ qt.newSession();
+ } catch (Exception e) {
+ System.err.println("Exception: " + e.getMessage());
+ e.printStackTrace();
+ System.err.flush();
+ fail("Unexpected exception in setup");
+ }
+ }
+
@Override
@After
public void tearDown() {
try {
- qt.shutdown();
+ qt.clearPostTestEffects();
+ qt.clearTestSideEffects();
} catch (Exception e) {
System.err.println("Exception: " + e.getMessage());
e.printStackTrace();
@@ -80,6 +88,14 @@ public class CoreHBaseNegativeCliDriver extends CliAdapter {
@Override
@AfterClass
public void shutdown() throws Exception {
+ try {
+ qt.shutdown();
+ } catch (Exception e) {
+ System.err.println("Exception: " + e.getMessage());
+ e.printStackTrace();
+ System.err.flush();
+ fail("Unexpected exception in tearDown");
+ }
// closeHBaseConnections
setup.tearDown();
}
@@ -89,11 +105,8 @@ public class CoreHBaseNegativeCliDriver extends CliAdapter {
long startTime = System.currentTimeMillis();
try {
System.err.println("Begin query: " + fname);
-
qt.addFile(fpath);
-
qt.cliInit(new File(fpath));
- qt.clearTestSideEffects();
int ecode = qt.executeClient(fname);
if (ecode == 0) {
qt.failed(fname, null);
@@ -103,7 +116,6 @@ public class CoreHBaseNegativeCliDriver extends CliAdapter {
if (result.getReturnCode() != 0) {
qt.failedDiff(result.getReturnCode(), fname, result.getCapturedOutput());
}
- qt.clearPostTestEffects();
} catch (Exception e) {
qt.failed(e, fname, null);
http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java
index 3be6f66..07ae6ac 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java
@@ -50,6 +50,7 @@ public class CoreNegativeCliDriver extends CliAdapter{
qt = new QTestUtil((cliConfig.getResultsDir()), (cliConfig.getLogDir()), miniMR,
hiveConfDir, hadoopVer, initScript, cleanupScript, false);
// do a one time initialization
+ qt.newSession();
qt.cleanUp();
qt.createSources();
} catch (Exception e) {
@@ -64,7 +65,7 @@ public class CoreNegativeCliDriver extends CliAdapter{
@Before
public void setUp() {
try {
- qt.clearTestSideEffects();
+ qt.newSession();
} catch (Throwable e) {
e.printStackTrace();
System.err.flush();
@@ -76,6 +77,7 @@ public class CoreNegativeCliDriver extends CliAdapter{
@After
public void tearDown() {
try {
+ qt.clearTestSideEffects();
qt.clearPostTestEffects();
} catch (Exception e) {
System.err.println("Exception: " + e.getMessage());
@@ -118,7 +120,7 @@ public class CoreNegativeCliDriver extends CliAdapter{
qt.addFile(fpath);
- qt.cliInit(new File(fpath), false);
+ qt.cliInit(new File(fpath));
int ecode = qt.executeClient(fname);
if (ecode == 0) {
qt.failed(fname, debugHint);
http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java
index af91866..55e744e 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java
@@ -68,6 +68,7 @@ public class CorePerfCliDriver extends CliAdapter{
cleanupScript, false, null);
// do a one time initialization
+ qt.newSession();
qt.cleanUp();
qt.createSources();
// Manually modify the underlying metastore db to reflect statistics corresponding to
@@ -92,7 +93,7 @@ public class CorePerfCliDriver extends CliAdapter{
@Override
public void setUp() {
try {
- qt.clearPostTestEffects();
+ qt.newSession();
} catch (Exception e) {
System.err.println("Exception: " + e.getMessage());
e.printStackTrace();
@@ -126,7 +127,7 @@ public class CorePerfCliDriver extends CliAdapter{
System.err.println("Begin query: " + fname);
qt.addFile(fpath);
- qt.cliInit(new File(fpath), false);
+ qt.cliInit(new File(fpath));
int ecode = qt.executeClient(fname);
if (ecode != 0) {
http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 2106fec..0bbd751 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -22,12 +22,10 @@ import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
-import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
-import java.io.FileReader;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.IOException;
@@ -158,6 +156,7 @@ public class QTestUtil {
private static final String DEFAULT_TEST_EC_POLICY = "RS-3-2-1024k";
private String testWarehouse;
+ @Deprecated
private final String testFiles;
private final File datasetDir;
protected final String outDir;
@@ -181,6 +180,7 @@ public class QTestUtil {
protected Hive db;
protected QueryState queryState;
protected HiveConf conf;
+ protected HiveConf savedConf;
private IDriver drv;
private BaseSemanticAnalyzer sem;
protected final boolean overWrite;
@@ -245,7 +245,7 @@ public class QTestUtil {
private CliDriver getCliDriver() {
if(cliDriver == null){
- cliDriver = new CliDriver();
+ throw new RuntimeException("no clidriver");
}
return cliDriver;
}
@@ -277,80 +277,6 @@ public class QTestUtil {
return conf;
}
- public boolean deleteDirectory(File path) {
- if (path.exists()) {
- File[] files = path.listFiles();
- for (File file : files) {
- if (file.isDirectory()) {
- deleteDirectory(file);
- } else {
- file.delete();
- }
- }
- }
- return (path.delete());
- }
-
- public void copyDirectoryToLocal(Path src, Path dest) throws Exception {
-
- FileSystem srcFs = src.getFileSystem(conf);
- FileSystem destFs = dest.getFileSystem(conf);
- if (srcFs.exists(src)) {
- FileStatus[] files = srcFs.listStatus(src);
- for (FileStatus file : files) {
- String name = file.getPath().getName();
- Path dfs_path = file.getPath();
- Path local_path = new Path(dest, name);
-
- // If this is a source table we do not copy it out
- if (getSrcTables().contains(name)) {
- continue;
- }
-
- if (file.isDirectory()) {
- if (!destFs.exists(local_path)) {
- destFs.mkdirs(local_path);
- }
- copyDirectoryToLocal(dfs_path, local_path);
- } else {
- srcFs.copyToLocalFile(dfs_path, local_path);
- }
- }
- }
- }
-
- static Pattern mapTok = Pattern.compile("(\\.?)(.*)_map_(.*)");
- static Pattern reduceTok = Pattern.compile("(.*)(reduce_[^\\.]*)((\\..*)?)");
-
- public void normalizeNames(File path) throws Exception {
- if (path.isDirectory()) {
- File[] files = path.listFiles();
- for (File file : files) {
- normalizeNames(file);
- }
- } else {
- Matcher m = reduceTok.matcher(path.getName());
- if (m.matches()) {
- String name = m.group(1) + "reduce" + m.group(3);
- path.renameTo(new File(path.getParent(), name));
- } else {
- m = mapTok.matcher(path.getName());
- if (m.matches()) {
- String name = m.group(1) + "map_" + m.group(3);
- path.renameTo(new File(path.getParent(), name));
- }
- }
- }
- }
-
- public String getOutputDirectory() {
- return outDir;
- }
-
- public String getLogDirectory() {
- return logDir;
- }
-
private String getHadoopMainVersion(String input) {
if (input == null) {
return null;
@@ -625,6 +551,7 @@ public class QTestUtil {
dataDir = new File(".").getAbsolutePath() + "/data/files";
}
testFiles = dataDir;
+ conf.set("test.data.dir", dataDir);
// Use path relative to dataDir directory if it is not specified
datasetDir = conf.get("test.data.set.files") == null
@@ -639,6 +566,7 @@ public class QTestUtil {
overWrite = "true".equalsIgnoreCase(System.getProperty("test.output.overwrite"));
init();
+ savedConf = new HiveConf(conf);
}
private String getScriptsDir() {
// Use the current directory if it is not specified
@@ -708,7 +636,7 @@ public class QTestUtil {
if (clusterType == MiniClusterType.druid || clusterType == MiniClusterType.druidKafka) {
final String tempDir = System.getProperty("test.tmp.dir");
druidCluster = new MiniDruidCluster("mini-druid",
- getLogDirectory(),
+ logDir,
tempDir,
setup.zkPort,
Utilities.jarFinderGetJar(MiniDruidCluster.class)
@@ -728,7 +656,7 @@ public class QTestUtil {
if(clusterType == MiniClusterType.kafka || clusterType == MiniClusterType.druidKafka) {
kafkaCluster = new SingleNodeKafkaCluster("kafka",
- getLogDirectory() + "/kafka-cluster",
+ logDir + "/kafka-cluster",
setup.zkPort
);
kafkaCluster.init(conf);
@@ -817,15 +745,7 @@ public class QTestUtil {
}
public void addFile(String queryFile) throws IOException {
- addFile(queryFile, false);
- }
-
- public void addFile(String queryFile, boolean partial) throws IOException {
- addFile(new File(queryFile));
- }
-
- public void addFile(File qf) throws IOException {
- addFile(qf, false);
+ addFile(new File(queryFile), false);
}
public void addFile(File qf, boolean partial) throws IOException {
@@ -1007,6 +927,40 @@ public class QTestUtil {
}
}
+ public void newSession() throws Exception {
+ newSession(true);
+ }
+
+ public void newSession(boolean canReuseSession) throws Exception {
+ // allocate and initialize a new conf since a test can
+ // modify conf by using 'set' commands
+ conf = new HiveConf(savedConf);
+ initConf();
+ initConfFromSetup();
+
+ // renew the metastore since the cluster type is unencrypted
+ db = Hive.get(conf); // propagate new conf to meta store
+
+ HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
+ "org.apache.hadoop.hive.ql.security.DummyAuthenticator");
+ CliSessionState ss = new CliSessionState(conf);
+ assert ss != null;
+ ss.in = System.in;
+
+ SessionState oldSs = SessionState.get();
+
+ restartSessions(canReuseSession, ss, oldSs);
+
+ closeSession(oldSs);
+
+ SessionState.start(ss);
+
+ cliDriver = new CliDriver();
+
+ File outf = new File(logDir, "initialize.log");
+ setSessionOutputs("that_shouldnt_happen_there", ss, outf);
+
+ }
/**
* Clear out any side effects of running tests
*/
@@ -1014,36 +968,19 @@ public class QTestUtil {
if (System.getenv(QTEST_LEAVE_FILES) != null) {
return;
}
+ // the test might have configured security/etc; open a new session to get rid of that
+ newSession();
// Remove any cached results from the previous test.
+ Utilities.clearWorkMap(conf);
NotificationEventPoll.shutdown();
QueryResultsCache.cleanupInstance();
-
- // allocate and initialize a new conf since a test can
- // modify conf by using 'set' commands
- conf = new HiveConf(IDriver.class);
- initConf();
- initConfFromSetup();
-
- // renew the metastore since the cluster type is unencrypted
- db = Hive.get(conf); // propagate new conf to meta store
-
clearTablesCreatedDuringTests();
clearUDFsCreatedDuringTests();
clearKeysCreatedInTests();
StatsSources.clearGlobalStats();
}
- protected void clearSettingsCreatedInTests() throws IOException {
- getCliDriver().processLine(String.format("set hive.security.authorization.enabled=false;"));
- getCliDriver().processLine(String.format("set user.name=%s;",
- System.getProperty(TEST_HIVE_USER_PROPERTY, "hive_test_user")));
-
- getCliDriver().processLine("set hive.metastore.partition.name.whitelist.pattern=;");
- getCliDriver().processLine("set hive.test.mode=false;");
- getCliDriver().processLine("set hive.mapred.mode=nonstrict;");
- }
-
protected void initConfFromSetup() throws Exception {
setup.preTest(conf);
}
@@ -1060,6 +997,7 @@ public class QTestUtil {
if (System.getenv(QTEST_LEAVE_FILES) != null) {
return;
}
+ conf.setBoolean("hive.test.shutdown.phase", true);
clearTablesCreatedDuringTests();
clearUDFsCreatedDuringTests();
@@ -1160,24 +1098,27 @@ public class QTestUtil {
}
}
- private void initDataSetForTest(File file){
- getCliDriver().processLine("set test.data.dir=" + testFiles + ";");
+ private void initDataSetForTest(File file) throws Exception {
+ synchronized (QTestUtil.class) {
+ DatasetParser parser = new DatasetParser();
+ parser.parse(file);
- DatasetParser parser = new DatasetParser();
- parser.parse(file);
+ DatasetCollection datasets = parser.getDatasets();
- DatasetCollection datasets = parser.getDatasets();
- for (String table : datasets.getTables()){
- synchronized (QTestUtil.class){
+ Set<String> missingDatasets = datasets.getTables();
+ missingDatasets.removeAll(getSrcTables());
+ if (missingDatasets.isEmpty()) {
+ return;
+ }
+ newSession(true);
+ for (String table : missingDatasets) {
initDataset(table);
}
+ newSession(true);
}
}
- protected void initDataset(String table) {
- if (getSrcTables().contains(table)){
- return;
- }
+ protected void initDataset(String table) throws Exception {
File tableFile = new File(new File(datasetDir, table), Dataset.INIT_FILE_NAME);
String commands = null;
@@ -1225,30 +1166,21 @@ public class QTestUtil {
cliDriver.processCmd("set hive.cli.print.header=true;");
}
- public void cliInit(File file) throws Exception {
- cliInit(file, true);
- }
-
- public String cliInit(File file, boolean recreate) throws Exception {
+ public String cliInit(File file) throws Exception {
String fileName = file.getName();
- if (recreate) {
- cleanUp(fileName);
- createSources(fileName);
+ initDataSetForTest(file);
+
+ if (qNoSessionReuseQuerySet.contains(fileName)) {
+ newSession(false);
}
- clearSettingsCreatedInTests();
- initDataSetForTest(file);
+ CliSessionState ss = (CliSessionState) SessionState.get();
- HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
- "org.apache.hadoop.hive.ql.security.DummyAuthenticator");
- Utilities.clearWorkMap(conf);
- CliSessionState ss = new CliSessionState(conf);
- assert ss != null;
- ss.in = System.in;
String outFileExtension = getOutFileExtension(fileName);
String stdoutName = null;
+
if (outDir != null) {
// TODO: why is this needed?
File qf = new File(outDir, fileName);
@@ -1256,22 +1188,9 @@ public class QTestUtil {
} else {
stdoutName = fileName + outFileExtension;
}
-
File outf = new File(logDir, stdoutName);
-
setSessionOutputs(fileName, ss, outf);
- SessionState oldSs = SessionState.get();
-
- boolean canReuseSession = !qNoSessionReuseQuerySet.contains(fileName);
- restartSessions(canReuseSession, ss, oldSs);
-
- closeSession(oldSs);
-
- SessionState.start(ss);
-
- cliDriver = new CliDriver();
-
if (fileName.equals("init_file.q")) {
ss.initFiles.add(AbstractCliConfig.HIVE_ROOT + "/data/scripts/test_init_file.sql");
}
@@ -1283,6 +1202,12 @@ public class QTestUtil {
private void setSessionOutputs(String fileName, CliSessionState ss, File outf)
throws FileNotFoundException, Exception, UnsupportedEncodingException {
OutputStream fo = new BufferedOutputStream(new FileOutputStream(outf));
+ if (ss.out != null) {
+ ss.out.flush();
+ }
+ if (ss.err != null) {
+ ss.err.flush();
+ }
if (qSortQuerySet.contains(fileName)) {
ss.out = new SortPrintStream(fo, "UTF-8");
} else if (qHashQuerySet.contains(fileName)) {
@@ -1955,7 +1880,7 @@ public class QTestUtil {
qt.startSessionState(false);
// assumption is that environment has already been cleaned once globally
// hence each thread does not call cleanUp() and createSources() again
- qt.cliInit(file, false);
+ qt.cliInit(file);
qt.executeClient(file.getName());
} catch (Throwable e) {
System.err.println("Query file " + file.getName() + " failed with exception "
@@ -1986,7 +1911,7 @@ public class QTestUtil {
qt[i] = new QTestUtil(resDir, logDir, MiniClusterType.none, null, "0.20",
initScript == null ? defaultInitScript : initScript,
cleanupScript == null ? defaultCleanupScript : cleanupScript, false);
- qt[i].addFile(qfiles[i]);
+ qt[i].addFile(qfiles[i], false);
qt[i].clearTestSideEffects();
}
@@ -2011,7 +1936,7 @@ public class QTestUtil {
qt[0].createSources();
for (int i = 0; i < qfiles.length && !failed; i++) {
qt[i].clearTestSideEffects();
- qt[i].cliInit(qfiles[i], false);
+ qt[i].cliInit(qfiles[i]);
qt[i].executeClient(qfiles[i].getName());
QTestProcessExecResult result = qt[i].checkCliDriverResults(qfiles[i].getName());
if (result.getReturnCode() != 0) {
@@ -2190,41 +2115,6 @@ public class QTestUtil {
(debugHint != null ? debugHint : ""));
}
- public static void addTestsToSuiteFromQfileNames(
- String qFileNamesFile,
- Set<String> qFilesToExecute,
- TestSuite suite,
- Object setup,
- SuiteAddTestFunctor suiteAddTestCallback) {
- try {
- File qFileNames = new File(qFileNamesFile);
- FileReader fr = new FileReader(qFileNames.getCanonicalFile());
- BufferedReader br = new BufferedReader(fr);
- String fName = null;
-
- while ((fName = br.readLine()) != null) {
- if (fName.isEmpty() || fName.trim().equals("")) {
- continue;
- }
-
- int eIdx = fName.indexOf('.');
-
- if (eIdx == -1) {
- continue;
- }
-
- String tName = fName.substring(0, eIdx);
-
- if (qFilesToExecute.isEmpty() || qFilesToExecute.contains(fName)) {
- suiteAddTestCallback.addTestToSuite(suite, setup, tName);
- }
- }
- br.close();
- } catch (Exception e) {
- Assert.fail("Unexpected exception " + org.apache.hadoop.util.StringUtils.stringifyException(e));
- }
- }
-
public QOutProcessor getQOutProcessor() {
return qOutProcessor;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java
index a071689..8f5744d 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java
@@ -57,6 +57,7 @@ public class CoreParseNegative extends CliAdapter{
String hadoopVer = cliConfig.getHadoopVersion();
qt = new QTestUtil((cliConfig.getResultsDir()), (cliConfig.getLogDir()), miniMR, null,
hadoopVer, initScript, cleanupScript, false);
+ qt.newSession();
} catch (Exception e) {
System.err.println("Exception: " + e.getMessage());
e.printStackTrace();
@@ -106,7 +107,7 @@ public class CoreParseNegative extends CliAdapter{
firstRun = false;
}
- qt.cliInit(new File(fpath), false);
+ qt.cliInit(new File(fpath));
ASTNode tree = qt.parseQuery(fname);
List<Task<? extends Serializable>> tasks = qt.analyzeAST(tree);
http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/ql/src/test/queries/clientpositive/druidkafkamini_basic.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/druidkafkamini_basic.q b/ql/src/test/queries/clientpositive/druidkafkamini_basic.q
index 814890a..2eb9dbc 100644
--- a/ql/src/test/queries/clientpositive/druidkafkamini_basic.q
+++ b/ql/src/test/queries/clientpositive/druidkafkamini_basic.q
@@ -16,7 +16,7 @@ CREATE TABLE druid_kafka_test(`__time` timestamp, page string, `user` string, la
ALTER TABLE druid_kafka_test SET TBLPROPERTIES('druid.kafka.ingestion' = 'START');
-!curl -ss http://localhost:8081/druid/indexer/v1/supervisor;
+!curl --noproxy * -ss http://localhost:8081/druid/indexer/v1/supervisor;
-- Sleep for some time for ingestion tasks to ingest events
!sleep 60;
http://git-wip-us.apache.org/repos/asf/hive/blob/e6577a0d/ql/src/test/queries/positive/input_testsequencefile.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/positive/input_testsequencefile.q b/ql/src/test/queries/positive/input_testsequencefile.q
index 2daa53a..979bb9a 100755
--- a/ql/src/test/queries/positive/input_testsequencefile.q
+++ b/ql/src/test/queries/positive/input_testsequencefile.q
@@ -1,3 +1,14 @@
--! qt:dataset:src
+
+--
+-- Table dest4_sequencefile
+--
+DROP TABLE IF EXISTS dest4_sequencefile;
+
+CREATE TABLE dest4_sequencefile (key STRING COMMENT 'default', value STRING COMMENT 'default')
+STORED AS
+INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
+OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat';
+
FROM src
INSERT OVERWRITE TABLE dest4_sequencefile SELECT src.key, src.value