You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by da...@apache.org on 2018/04/27 17:25:15 UTC
hive git commit: HIVE-19198: Few flaky hcatalog tests (Daniel Dai, reviewed by Vaibhav Gumashta)
Repository: hive
Updated Branches:
refs/heads/master 20697513a -> e0b318269
HIVE-19198: Few flaky hcatalog tests (Daniel Dai, reviewed by Vaibhav Gumashta)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e0b31826
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e0b31826
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e0b31826
Branch: refs/heads/master
Commit: e0b3182695f28fb853db94e388b44be89ad7c99f
Parents: 2069751
Author: Daniel Dai <da...@gmail.com>
Authored: Fri Apr 27 10:25:02 2018 -0700
Committer: Daniel Dai <da...@gmail.com>
Committed: Fri Apr 27 10:25:02 2018 -0700
----------------------------------------------------------------------
.../apache/hive/hcatalog/cli/TestPermsGrp.java | 89 ++++++++++++--------
1 file changed, 55 insertions(+), 34 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/e0b31826/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
index 3c9d89a..3cf172b 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
@@ -18,7 +18,9 @@
*/
package org.apache.hive.hcatalog.cli;
+import java.io.BufferedReader;
import java.io.FileNotFoundException;
+import java.io.InputStreamReader;
import java.security.Policy;
import java.util.ArrayList;
import java.util.Arrays;
@@ -50,7 +52,6 @@ import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hive.hcatalog.DerbyPolicy;
-import org.apache.hive.hcatalog.ExitException;
import org.apache.hive.hcatalog.NoExitSecurityManager;
import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
import org.apache.hive.hcatalog.common.HCatConstants;
@@ -91,22 +92,16 @@ public class TestPermsGrp extends TestCase {
hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES, 3);
-
- hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
- hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
- hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
hcatConf.setTimeVar(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, 60, TimeUnit.SECONDS);
hcatConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
- clientWH = new Warehouse(hcatConf);
- msc = new HiveMetaStoreClient(hcatConf);
- System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
- System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
- System.setProperty(HiveConf.ConfVars.METASTOREWAREHOUSE.varname,
+ hcatConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname,
MetastoreConf.getVar(hcatConf, MetastoreConf.ConfVars.WAREHOUSE));
- System.setProperty(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname,
+ hcatConf.set(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname,
MetastoreConf.getVar(hcatConf, MetastoreConf.ConfVars.CONNECT_URL_KEY));
- System.setProperty(HiveConf.ConfVars.METASTOREURIS.varname,
+ hcatConf.set(HiveConf.ConfVars.METASTOREURIS.varname,
MetastoreConf.getVar(hcatConf, MetastoreConf.ConfVars.THRIFT_URIS));
+ clientWH = new Warehouse(hcatConf);
+ msc = new HiveMetaStoreClient(hcatConf);
}
public void testCustomPerms() throws Exception {
@@ -125,13 +120,9 @@ public class TestPermsGrp extends TestCase {
cleanupTbl(dbName, tblName, typeName);
// Next user did specify perms.
- try {
- callHCatCli(new String[]{"-e", "create table simptbl (name string) stored as RCFILE", "-p", "rwx-wx---"});
- fail();
- } catch (Exception e) {
- assertTrue(e instanceof ExitException);
- assertEquals(((ExitException) e).getStatus(), 0);
- }
+ int ret = callHCatCli(new String[]{"-e", "create table simptbl (name string) stored as RCFILE",
+ "-p", "rwx-wx---"});
+ assertEquals(ret, 0);
dfsPath = clientWH.getDefaultTablePath(db, tblName);
assertEquals(FsPermission.valueOf("drwx-wx---"), dfsPath.getFileSystem(hcatConf).getFileStatus(dfsPath).getPermission());
@@ -140,12 +131,9 @@ public class TestPermsGrp extends TestCase {
// User specified perms in invalid format.
hcatConf.set(HCatConstants.HCAT_PERMS, "rwx");
// make sure create table fails.
- try {
- callHCatCli(new String[]{"-e", "create table simptbl (name string) stored as RCFILE", "-p", "rwx"});
- fail();
- } catch (Exception me) {
- assertTrue(me instanceof ExitException);
- }
+ ret = callHCatCli(new String[]{"-e", "create table simptbl (name string) stored as RCFILE", "-p", "rwx"});
+ assertFalse(ret == 0);
+
// No physical dir gets created.
dfsPath = clientWH.getDefaultTablePath(db, tblName);
try {
@@ -168,13 +156,9 @@ public class TestPermsGrp extends TestCase {
hcatConf.set(HCatConstants.HCAT_PERMS, "drw-rw-rw-");
hcatConf.set(HCatConstants.HCAT_GROUP, "THIS_CANNOT_BE_A_VALID_GRP_NAME_EVER");
- try {
- // create table must fail.
- callHCatCli(new String[]{"-e", "create table simptbl (name string) stored as RCFILE", "-p", "rw-rw-rw-", "-g", "THIS_CANNOT_BE_A_VALID_GRP_NAME_EVER"});
- fail();
- } catch (Exception me) {
- assertTrue(me instanceof SecurityException);
- }
+ // create table must fail.
+ ret = callHCatCli(new String[]{"-e", "create table simptbl (name string) stored as RCFILE", "-p", "rw-rw-rw-", "-g", "THIS_CANNOT_BE_A_VALID_GRP_NAME_EVER"});
+ assertFalse(ret == 0);
try {
// no metadata should get created.
@@ -198,13 +182,50 @@ public class TestPermsGrp extends TestCase {
}
}
- private void callHCatCli(String[] args) {
+ private int callHCatCli(String[] args) throws Exception {
List<String> argsList = new ArrayList<String>();
+ argsList.add(System.getProperty("java.home") + "/bin/java");
+ argsList.add(HCatCli.class.getName());
argsList.add("-Dhive.support.concurrency=false");
argsList
.add("-Dhive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
+ argsList.add("-D" + HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES.varname + "=3");
+ argsList.add("-D" + HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES.varname + "=3");
+ argsList.add("-D" + HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT.varname + "=60");
+ argsList.add("-D" + HiveConf.ConfVars.METASTOREWAREHOUSE.varname + "="
+ + MetastoreConf.getVar(hcatConf, MetastoreConf.ConfVars.WAREHOUSE));
+ argsList.add("-D" + HiveConf.ConfVars.METASTORECONNECTURLKEY.varname + "="
+ + MetastoreConf.getVar(hcatConf, MetastoreConf.ConfVars.CONNECT_URL_KEY));
+ argsList.add("-D" + HiveConf.ConfVars.METASTOREURIS.varname + "="
+ + MetastoreConf.getVar(hcatConf, MetastoreConf.ConfVars.THRIFT_URIS));
+ argsList.add("-D" + HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname + "=" + HCatSemanticAnalyzer.class.getName());
+ argsList.add("-D" + HiveConf.ConfVars.PREEXECHOOKS.varname + "=");
+ argsList.add("-D" + HiveConf.ConfVars.POSTEXECHOOKS.varname + "=");
+ argsList.add("-D" + HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname + "=false");
+
+ argsList.add("-D" + "test.warehouse.dir=" + System.getProperty("test.warehouse.dir"));
argsList.addAll(Arrays.asList(args));
- HCatCli.main(argsList.toArray(new String[]{}));
+ ProcessBuilder builder = new ProcessBuilder().command(argsList.toArray(new String[] {}));
+ builder.environment().put("CLASSPATH", System.getProperty("java.class.path"));
+
+ Process p = builder.start();
+
+ String line;
+ BufferedReader r = new BufferedReader(new InputStreamReader(p.getInputStream()));
+ while ((line = r.readLine()) != null) {
+ System.out.println(line);
+ }
+ r.close();
+
+ r = new BufferedReader(new InputStreamReader(p.getErrorStream()));
+ while ((line = r.readLine()) != null) {
+ System.err.println(line);
+ }
+ r.close();
+
+ int ret = p.waitFor();
+
+ return ret;
}
private void silentDropDatabase(String dbName) throws MetaException, TException {