Posted to commits@sentry.apache.org by an...@apache.org on 2016/04/26 05:47:15 UTC

[3/3] sentry git commit: SENTRY-1216: disable Sentry HA tests for now; add a timeout for each test class/method; fix transient JUnit timeout issue. (Anne Yu, reviewed by HaoHao).

SENTRY-1216: disable Sentry HA tests for now; add a timeout for each test class/method; fix transient JUnit timeout issue. (Anne Yu, reviewed by HaoHao).


Project: http://git-wip-us.apache.org/repos/asf/sentry/repo
Commit: http://git-wip-us.apache.org/repos/asf/sentry/commit/66b32afa
Tree: http://git-wip-us.apache.org/repos/asf/sentry/tree/66b32afa
Diff: http://git-wip-us.apache.org/repos/asf/sentry/diff/66b32afa

Branch: refs/heads/master
Commit: 66b32afa87fab816af972d68b253f46c53ec7f58
Parents: a051630
Author: Anne Yu <an...@cloudera.com>
Authored: Mon Apr 25 15:04:09 2016 -0700
Committer: Anne Yu <an...@cloudera.com>
Committed: Mon Apr 25 21:18:27 2016 -0700

----------------------------------------------------------------------
 sentry-tests/sentry-tests-hive/pom.xml          |    6 +-
 .../dbprovider/AbstractTestWithDbProvider.java  |    8 +
 .../tests/e2e/dbprovider/TestDbConnections.java |    4 +
 .../tests/e2e/dbprovider/TestDbOperations.java  |   37 -
 .../e2e/dbprovider/TestDbOperationsPart1.java   |   40 +
 .../e2e/dbprovider/TestDbOperationsPart2.java   |   39 +
 .../TestDbPrivilegesAtTableScope.java           |   39 -
 .../TestDbPrivilegesAtTableScopePart1.java      |   39 +
 .../TestDbPrivilegesAtTableScopePart2.java      |   39 +
 .../tests/e2e/hdfs/TestHDFSIntegration.java     |   12 +-
 .../e2e/hdfs/TestHDFSIntegrationWithHA.java     |    2 +
 .../AbstractTestWithStaticConfiguration.java    |   42 +-
 .../sentry/tests/e2e/hive/TestOperations.java   | 1125 ------------------
 .../tests/e2e/hive/TestOperationsPart1.java     |  566 +++++++++
 .../tests/e2e/hive/TestOperationsPart2.java     |  663 +++++++++++
 .../e2e/hive/TestPrivilegesAtTableScope.java    |  662 -----------
 .../hive/TestPrivilegesAtTableScopePart1.java   |  406 +++++++
 .../hive/TestPrivilegesAtTableScopePart2.java   |  337 ++++++
 18 files changed, 2195 insertions(+), 1871 deletions(-)
----------------------------------------------------------------------
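
For context, the per-class and per-method timeouts this change adds are plain JUnit 4 Timeout rules: a @ClassRule bounds the whole test class and a @Rule bounds each individual test method. A minimal self-contained sketch of that pattern follows; the class and method names are illustrative and not taken from the Sentry code, while the 600000/180000 ms budgets match the ones used in the hunks below.

import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;

public class TimeoutRuleExample {

  // One budget for the whole class: everything in it must finish within 10 minutes.
  @ClassRule
  public static Timeout classTimeout = new Timeout(600000); // millis

  // One budget per test method: each @Test must finish within 3 minutes.
  @Rule
  public Timeout methodTimeout = new Timeout(180000); // millis

  @Test
  public void finishesWithinBudget() throws Exception {
    Thread.sleep(100); // any test body; a hang here fails after 3 minutes instead of stalling the build
  }
}

The Timeout rule evaluates the test in a separate thread and fails it with a "test timed out after ..." exception once the budget is exceeded, which is the message the base-class watcher further down looks for.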


http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/pom.xml b/sentry-tests/sentry-tests-hive/pom.xml
index 9d2ef76..e36b5c0 100644
--- a/sentry-tests/sentry-tests-hive/pom.xml
+++ b/sentry-tests/sentry-tests-hive/pom.xml
@@ -466,13 +466,15 @@ limitations under the License.
           <include>**/TestDbPerDatabasePolicyFile.java</include>
           <include>**/TestDbPrivilegeAtTransform.java</include>
           <include>**/TestDbPrivilegesAtDatabaseScope.java</include>
-          <include>**/TestDbPrivilegesAtTableScope.java</include>
+          <include>**/TestDbPrivilegesAtTableScopePart1.java</include>
+          <include>**/TestDbPrivilegesAtTableScopePart2.java</include>
           <include>**/TestDbSandboxOps.java</include>
           <include>**/TestDbExportImportPrivileges.java</include>
           <include>**/TestDbUriPermissions.java</include>
           <include>**/TestDbRuntimeMetadataRetrieval.java</include>
           <include>**/TestDatabaseProvider.java</include>
-          <include>**/TestDbOperations.java</include>
+          <include>**/TestDbOperationsPart1.java</include>
+          <include>**/TestDbOperationsPart2.java</include>
           <include>**/TestPrivilegeWithGrantOption.java</include>
           <include>**/TestDbPrivilegesAtColumnScope.java</include>
           <include>**/TestColumnEndToEnd.java</include>

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/AbstractTestWithDbProvider.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/AbstractTestWithDbProvider.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/AbstractTestWithDbProvider.java
index 17a2d1e..7d36d73 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/AbstractTestWithDbProvider.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/AbstractTestWithDbProvider.java
@@ -42,6 +42,9 @@ import org.apache.sentry.tests.e2e.hive.StaticUserGroup;
 import org.apache.sentry.tests.e2e.hive.hiveserver.HiveServerFactory;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.rules.Timeout;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
@@ -49,6 +52,11 @@ import com.google.common.io.Files;
 
 public abstract class AbstractTestWithDbProvider extends AbstractTestWithHiveServer {
 
+  @ClassRule
+  public static Timeout classTimeout = new Timeout(600000); //millis, each class runs less than 600s (10m)
+  @Rule
+  public Timeout timeout = new Timeout(180000); //millis, each test runs less than 180s (3m)
+
   protected static final String SERVER_HOST = "localhost";
 
   protected static Map<String, String> properties = Maps.newHashMap();

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbConnections.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbConnections.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbConnections.java
index 2af0536..49fb182 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbConnections.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbConnections.java
@@ -26,6 +26,9 @@ import org.apache.sentry.provider.db.SentryAccessDeniedException;
 import org.apache.sentry.provider.db.SentryAlreadyExistsException;
 import org.apache.sentry.provider.file.PolicyFile;
 import org.apache.sentry.tests.e2e.hive.AbstractTestWithStaticConfiguration;
+
+import static org.junit.Assume.assumeThat;
+import static org.hamcrest.Matchers.is;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -44,6 +47,7 @@ public class TestDbConnections extends AbstractTestWithStaticConfiguration {
     super.setupAdmin();
     super.setup();
     PolicyFile.setAdminOnServer1(ADMINGROUP);
+    assumeThat(getSentrySrv().getNumActiveClients(), is(0L));
   }
 
   /**
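
The assumeThat guard added above relies on JUnit's Assume: when the matcher does not match, the test is reported as skipped rather than failed, so leftover client connections from an earlier test do not show up as spurious failures. A tiny illustrative sketch of the mechanism; the local variable stands in for the real getNumActiveClients() probe:

import static org.hamcrest.Matchers.is;
import static org.junit.Assume.assumeThat;

import org.junit.Test;

public class AssumeExample {

  @Test
  public void runsOnlyOnACleanEnvironment() {
    long activeClients = 1L; // stand-in for an environment check such as getNumActiveClients()
    // If the value is non-zero the assumption fails and JUnit skips the test
    // instead of failing it, keeping a dirty environment from looking like a regression.
    assumeThat(activeClients, is(0L));
    // ... the real test body would run here only when no clients are active ...
  }
}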

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperations.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperations.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperations.java
deleted file mode 100644
index 3fab344..0000000
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperations.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.tests.e2e.dbprovider;
-
-import org.apache.sentry.tests.e2e.hive.AbstractTestWithStaticConfiguration;
-import org.apache.sentry.tests.e2e.hive.TestOperations;
-import org.junit.Before;
-import org.junit.BeforeClass;
-
-public class TestDbOperations extends TestOperations{
-  @Override
-  @Before
-  public void setup() throws Exception {
-    super.setupAdmin();
-    super.setup();
-  }
-  @BeforeClass
-  public static void setupTestStaticConfiguration() throws Exception {
-    useSentryService = true;
-    AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperationsPart1.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperationsPart1.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperationsPart1.java
new file mode 100644
index 0000000..4ccf270
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperationsPart1.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.tests.e2e.dbprovider;
+
+/**
+ * Test db operations part 1
+ */
+import org.apache.sentry.tests.e2e.hive.AbstractTestWithStaticConfiguration;
+import org.apache.sentry.tests.e2e.hive.TestOperationsPart1;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+public class TestDbOperationsPart1 extends TestOperationsPart1 {
+  @Override
+  @Before
+  public void setup() throws Exception {
+    super.setupAdmin();
+    super.setup();
+  }
+  @BeforeClass
+  public static void setupTestStaticConfiguration() throws Exception {
+    useSentryService = true;
+    AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperationsPart2.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperationsPart2.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperationsPart2.java
new file mode 100644
index 0000000..d57d669
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperationsPart2.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.tests.e2e.dbprovider;
+
+import org.apache.sentry.tests.e2e.hive.AbstractTestWithStaticConfiguration;
+import org.apache.sentry.tests.e2e.hive.TestOperationsPart2;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+/**
+ * Test db operations part 2
+ */
+public class TestDbOperationsPart2 extends TestOperationsPart2 {
+  @Override
+  @Before
+  public void setup() throws Exception {
+    super.setupAdmin();
+    super.setup();
+  }
+  @BeforeClass
+  public static void setupTestStaticConfiguration() throws Exception {
+    useSentryService = true;
+    AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
+  }
+}

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScope.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScope.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScope.java
deleted file mode 100644
index a4f07df..0000000
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScope.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.tests.e2e.dbprovider;
-
-import org.apache.sentry.tests.e2e.hive.AbstractTestWithStaticConfiguration;
-import org.apache.sentry.tests.e2e.hive.TestPrivilegesAtTableScope;
-import org.junit.Before;
-import org.junit.BeforeClass;
-
-public class TestDbPrivilegesAtTableScope extends TestPrivilegesAtTableScope {
-  @Override
-  @Before
-  public void setup() throws Exception {
-    super.setupAdmin();
-    super.setup();
-    prepareDBDataForTest();
-  }
-  @BeforeClass
-  public static void setupTestStaticConfiguration() throws Exception {
-    useSentryService = true;
-    AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScopePart1.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScopePart1.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScopePart1.java
new file mode 100644
index 0000000..9bb476c
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScopePart1.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.tests.e2e.dbprovider;
+
+import org.apache.sentry.tests.e2e.hive.AbstractTestWithStaticConfiguration;
+import org.apache.sentry.tests.e2e.hive.TestPrivilegesAtTableScopePart1;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+public class TestDbPrivilegesAtTableScopePart1 extends TestPrivilegesAtTableScopePart1 {
+  @Override
+  @Before
+  public void setup() throws Exception {
+    super.setupAdmin();
+    super.setup();
+    prepareDBDataForTest();
+  }
+  @BeforeClass
+  public static void setupTestStaticConfiguration() throws Exception {
+    useSentryService = true;
+    AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScopePart2.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScopePart2.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScopePart2.java
new file mode 100644
index 0000000..e21d344
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScopePart2.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.tests.e2e.dbprovider;
+
+import org.apache.sentry.tests.e2e.hive.AbstractTestWithStaticConfiguration;
+import org.apache.sentry.tests.e2e.hive.TestPrivilegesAtTableScopePart2;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+public class TestDbPrivilegesAtTableScopePart2 extends TestPrivilegesAtTableScopePart2 {
+  @Override
+  @Before
+  public void setup() throws Exception {
+    super.setupAdmin();
+    super.setup();
+    prepareDBDataForTest();
+  }
+  @BeforeClass
+  public static void setupTestStaticConfiguration() throws Exception {
+    useSentryService = true;
+    AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegration.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegration.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegration.java
index 99242d7..c3a5379 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegration.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegration.java
@@ -40,8 +40,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import com.google.common.base.Preconditions;
 
 import org.apache.sentry.core.common.utils.PathUtils;
-import org.junit.Assert;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
@@ -83,11 +81,17 @@ import org.apache.sentry.tests.e2e.hive.hiveserver.InternalMetastoreServer;
 import org.apache.sentry.tests.e2e.minisentry.SentrySrv;
 import org.apache.sentry.tests.e2e.minisentry.SentrySrvFactory;
 import org.fest.reflect.core.Reflection;
+
+import org.junit.Assert;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Ignore;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.Timeout;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -101,6 +105,10 @@ public class TestHDFSIntegration {
   private static final Logger LOGGER = LoggerFactory
       .getLogger(TestHDFSIntegration.class);
 
+  @ClassRule
+  public static Timeout classTimeout = new Timeout(600000); //millis, each class runs less than 600s (10m)
+  @Rule
+  public Timeout timeout = new Timeout(180000); //millis, each test runs less than 180s (3m)
 
   public static class WordCountMapper extends MapReduceBase implements
       Mapper<LongWritable, Text, String, Long> {

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationWithHA.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationWithHA.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationWithHA.java
index 92c0693..be6d082 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationWithHA.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationWithHA.java
@@ -18,7 +18,9 @@
 package org.apache.sentry.tests.e2e.hdfs;
 
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 
+@Ignore ("Disable sentry HA tests for now")
 public class TestHDFSIntegrationWithHA extends TestHDFSIntegration {
   @BeforeClass
   public static void setup() throws Exception {

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java
index cb5039b..d2a1d36 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java
@@ -35,7 +35,18 @@ import java.util.Map;
 import java.util.HashSet;
 
 import com.google.common.collect.Sets;
+import org.junit.After;
 import org.junit.Assert;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.rules.RuleChain;
+import org.junit.rules.Timeout;
+import org.junit.rules.TestRule;
+import org.junit.rules.TestWatcher;
+import org.junit.runner.Description;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -65,10 +76,7 @@ import org.apache.sentry.tests.e2e.minisentry.SentrySrvFactory;
 import org.apache.sentry.tests.e2e.minisentry.SentrySrvFactory.SentrySrvType;
 import org.apache.sentry.tests.e2e.minisentry.SentrySrv;
 import org.apache.tools.ant.util.StringUtils;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -82,6 +90,32 @@ import javax.security.auth.login.LoginContext;
 public abstract class AbstractTestWithStaticConfiguration {
   private static final Logger LOGGER = LoggerFactory
       .getLogger(AbstractTestWithStaticConfiguration.class);
+
+  @ClassRule
+  public final static TestRule timeoutClass = RuleChain
+      .outerRule(new TestWatcher() {
+        @Override
+        protected void failed(Throwable e, Description description) {
+          LOGGER.error("Time out = " + e);
+          if (e.getMessage().contains("test timed out after")) {
+            LOGGER.error("Test class time out, but caught by rule, description = " + description + "ex = " + e);
+          }
+        }
+      })
+      .around(new Timeout(600000)); //millis, each test runs less than 600s (or 10m)
+
+  @Rule
+  public final TestRule timeout = RuleChain
+      .outerRule(new TestWatcher() {
+        @Override
+        protected void failed(Throwable e, Description description) {
+          if (e.getMessage().contains("test timed out after")) {
+            LOGGER.error("Test method time out, but caught by rule, description = " + description + "ex = " + e);
+          }
+        }
+      })
+      .around(new Timeout(180000)); //millis, each test runs less than 180s (or 3m)
+
   protected static final String SINGLE_TYPE_DATA_FILE_NAME = "kv1.dat";
   protected static final String ALL_DB1 = "server=server1->db=db_1",
       ALL_DB2 = "server=server1->db=db_2",
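
The hunk above combines that Timeout with a RuleChain whose outer TestWatcher inspects the failure, so a "test timed out after ..." exception is logged explicitly instead of disappearing into a generic surefire report. A small stand-alone sketch of the same RuleChain/TestWatcher arrangement; the class name, log destination, and the 2-second budget are illustrative:

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.RuleChain;
import org.junit.rules.TestRule;
import org.junit.rules.TestWatcher;
import org.junit.rules.Timeout;
import org.junit.runner.Description;

public class RuleChainTimeoutExample {

  // The outer TestWatcher observes the failure produced by the inner Timeout,
  // so a timeout can be logged before JUnit reports the test as failed.
  @Rule
  public final TestRule timeout = RuleChain
      .outerRule(new TestWatcher() {
        @Override
        protected void failed(Throwable e, Description description) {
          if (e.getMessage() != null && e.getMessage().contains("test timed out after")) {
            System.err.println("Test method timed out: " + description + ", ex = " + e);
          }
        }
      })
      .around(new Timeout(2000)); // millis; a small budget so the sketch trips quickly

  @Test
  public void deliberatelySlow() throws Exception {
    Thread.sleep(5000); // exceeds the 2 s budget; the Timeout fails the test and the watcher logs it
  }
}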

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperations.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperations.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperations.java
deleted file mode 100644
index 77106d4..0000000
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperations.java
+++ /dev/null
@@ -1,1125 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.tests.e2e.hive;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.sentry.provider.file.PolicyFile;
-import static org.junit.Assert.assertTrue;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
-
-import com.google.common.io.Resources;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TestOperations extends AbstractTestWithStaticConfiguration {
-  private static final Logger LOGGER = LoggerFactory
-      .getLogger(TestOperations.class);
-
-  private PolicyFile policyFile;
-  final String tableName = "tb1";
-
-  static Map<String, String> privileges = new HashMap<String, String>();
-  static {
-    privileges.put("all_server", "server=server1->action=all");
-    privileges.put("create_server", "server=server1->action=create");
-    privileges.put("all_db1", "server=server1->db=" + DB1 + "->action=all");
-    privileges.put("select_db1", "server=server1->db=" + DB1 + "->action=select");
-    privileges.put("insert_db1", "server=server1->db=" + DB1 + "->action=insert");
-    privileges.put("create_db1", "server=server1->db=" + DB1 + "->action=create");
-    privileges.put("drop_db1", "server=server1->db=" + DB1 + "->action=drop");
-    privileges.put("alter_db1", "server=server1->db=" + DB1 + "->action=alter");
-    privileges.put("create_db2", "server=server1->db=" + DB2 + "->action=create");
-
-    privileges.put("all_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=all");
-    privileges.put("select_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=select");
-    privileges.put("insert_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=insert");
-    privileges.put("alter_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=alter");
-    privileges.put("alter_db1_ptab", "server=server1->db=" + DB1 + "->table=ptab->action=alter");
-    privileges.put("index_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=index");
-    privileges.put("lock_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=lock");
-    privileges.put("drop_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=drop");
-    privileges.put("insert_db2_tb2", "server=server1->db=" + DB2 + "->table=tb2->action=insert");
-    privileges.put("select_db1_view1", "server=server1->db=" + DB1 + "->table=view1->action=select");
-
-  }
-
-  @Before
-  public void setup() throws Exception{
-    policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP)
-        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
-    writePolicyFile(policyFile);
-  }
-
-  private void adminCreate(String db, String table) throws Exception{
-    adminCreate(db, table, false);
-  }
-
-  private void adminCreate(String db, String table, boolean partitioned) throws Exception{
-    Connection connection = context.createConnection(ADMIN1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("DROP DATABASE IF EXISTS " + db + " CASCADE");
-    statement.execute("CREATE DATABASE " + db);
-    if(table !=null) {
-      if (partitioned) {
-        statement.execute("CREATE table  " + db + "." + table + " (a string) PARTITIONED BY (b string)");
-      } else{
-        statement.execute("CREATE table  " + db + "." + table + " (a string)");
-      }
-
-    }
-    statement.close();
-    connection.close();
-  }
-
-  private void adminCreatePartition() throws Exception{
-    Connection connection = context.createConnection(ADMIN1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("USE " + DB1);
-    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '1') ");
-    statement.close();
-    connection.close();
-  }
-
-  /* Test all operations that require create on Server
-  1. Create database : HiveOperation.CREATEDATABASE
-   */
-  @Test
-  public void testCreateOnServer() throws Exception{
-    policyFile
-        .addPermissionsToRole("create_server", privileges.get("create_server"))
-        .addRolesToGroup(USERGROUP1, "create_server");
-
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("Create database " + DB2);
-    statement.close();
-    connection.close();
-
-    //Negative case
-    policyFile
-        .addPermissionsToRole("create_db1", privileges.get("create_db1"))
-        .addRolesToGroup(USERGROUP2, "create_db1");
-    writePolicyFile(policyFile);
-
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    context.assertSentrySemanticException(statement, "CREATE database " + DB1, semanticException);
-    statement.close();
-    connection.close();
-
-  }
-
-  @Test
-  public void testInsertInto() throws Exception{
-    File dataFile;
-    dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME);
-    FileOutputStream to = new FileOutputStream(dataFile);
-    Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to);
-    to.close();
-
-    adminCreate(DB1, null);
-    policyFile
-        .addPermissionsToRole("all_db1", privileges.get("all_db1"))
-        .addPermissionsToRole("all_uri", "server=server1->uri=file://" + dataDir)
-        .addRolesToGroup(USERGROUP1, "all_db1", "all_uri");
-
-
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("create table bar (key int)");
-    statement.execute("load data local inpath '" + dataFile.getPath() + "' into table bar");
-    statement.execute("create table foo (key int) partitioned by (part int) stored as parquet");
-    statement.execute("insert into table foo PARTITION(part=1) select key from bar");
-
-    statement.close();
-    connection.close();
-  }
-
-  /* Test all operations that require create on Database alone
-  1. Create table : HiveOperation.CREATETABLE
-  */
-  @Test
-  public void testCreateOnDatabase() throws Exception{
-    adminCreate(DB1, null);
-    policyFile
-        .addPermissionsToRole("create_db1", privileges.get("create_db1"))
-        .addPermissionsToRole("all_db1", privileges.get("all_db1"))
-        .addRolesToGroup(USERGROUP1, "create_db1")
-        .addRolesToGroup(USERGROUP2, "all_db1");
-
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("CREATE TABLE " + DB1 + ".tb2(a int)");
-    statement.close();
-    connection.close();
-
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("CREATE TABLE " + DB1 + ".tb3(a int)");
-
-    statement.close();
-    connection.close();
-
-    //Negative case
-    policyFile
-        .addPermissionsToRole("all_db1_tb1", privileges.get("select_db1"))
-        .addRolesToGroup(USERGROUP3, "all_db1_tb1");
-    writePolicyFile(policyFile);
-
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    context.assertSentrySemanticException(statement, "CREATE TABLE " + DB1 + ".tb1(a int)", semanticException);
-    statement.close();
-    connection.close();
-  }
-
-  /* Test all operations that require drop on Database alone
-  1. Drop database : HiveOperation.DROPDATABASE
-  */
-  @Test
-  public void testDropOnDatabase() throws Exception{
-    adminCreate(DB1, null);
-    policyFile
-        .addPermissionsToRole("drop_db1", privileges.get("drop_db1"))
-        .addRolesToGroup(USERGROUP1, "drop_db1");
-
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("DROP DATABASE " + DB1);
-    statement.close();
-    connection.close();
-
-    adminCreate(DB1, null);
-
-    policyFile
-        .addPermissionsToRole("all_db1", privileges.get("all_db1"))
-        .addRolesToGroup(USERGROUP2, "all_db1");
-    writePolicyFile(policyFile);
-
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("DROP DATABASE " + DB1);
-
-    statement.close();
-    connection.close();
-
-    //Negative case
-    adminCreate(DB1, null);
-    policyFile
-        .addPermissionsToRole("select_db1", privileges.get("select_db1"))
-        .addRolesToGroup(USERGROUP3, "select_db1");
-    writePolicyFile(policyFile);
-
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    context.assertSentrySemanticException(statement, "drop database " + DB1, semanticException);
-    statement.close();
-    connection.close();
-  }
-
-  /* Test all operations that require alter on Database alone
-  1. Alter database : HiveOperation.ALTERDATABASE
-   */
-  @Test
-  public void testAlterOnDatabase() throws Exception{
-    adminCreate(DB1, null);
-    policyFile
-        .addPermissionsToRole("alter_db1", privileges.get("alter_db1"))
-        .addPermissionsToRole("all_db1", privileges.get("all_db1"))
-        .addRolesToGroup(USERGROUP2, "all_db1")
-        .addRolesToGroup(USERGROUP1, "alter_db1");
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("ALTER DATABASE " + DB1 + " SET DBPROPERTIES ('comment'='comment')");
-
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("ALTER DATABASE " + DB1 + " SET DBPROPERTIES ('comment'='comment')");
-    statement.close();
-    connection.close();
-
-    //Negative case
-    adminCreate(DB1, null);
-    policyFile
-        .addPermissionsToRole("select_db1", privileges.get("select_db1"))
-        .addRolesToGroup(USERGROUP3, "select_db1");
-    writePolicyFile(policyFile);
-
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    context.assertSentrySemanticException(statement, "ALTER DATABASE " + DB1 + " SET DBPROPERTIES ('comment'='comment')", semanticException);
-    statement.close();
-    connection.close();
-  }
-
-  /* SELECT/INSERT on DATABASE
-   1. HiveOperation.DESCDATABASE
-   */
-  @Test
-  public void testDescDB() throws Exception {
-    adminCreate(DB1, tableName);
-    policyFile
-        .addPermissionsToRole("select_db1", privileges.get("select_db1"))
-        .addPermissionsToRole("insert_db1", privileges.get("insert_db1"))
-        .addRolesToGroup(USERGROUP1, "select_db1")
-        .addRolesToGroup(USERGROUP2, "insert_db1");
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("describe database " + DB1);
-    statement.close();
-    connection.close();
-
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("describe database " + DB1);
-    statement.close();
-    connection.close();
-
-    //Negative case
-    policyFile
-        .addPermissionsToRole("all_db1_tb1", privileges.get("all_db1_tb1"))
-        .addRolesToGroup(USERGROUP3, "all_db1_tb1");
-    writePolicyFile(policyFile);
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    context.assertSentrySemanticException(statement, "describe database " + DB1, semanticException);
-    statement.close();
-    connection.close();
-
-  }
-
-  private void assertSemanticException(Statement stmt, String command) throws SQLException{
-    context.assertSentrySemanticException(stmt, command, semanticException);
-  }
-
-  /*
-  1. Analyze table (HiveOperation.QUERY) : select + insert on table
-   */
-  @Test
-  public void testSelectAndInsertOnTable() throws Exception {
-    adminCreate(DB1, tableName, true);
-    adminCreatePartition();
-    policyFile
-        .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
-        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
-        .addRolesToGroup(USERGROUP1, "select_db1_tb1", "insert_db1_tb1");
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("ANALYZE TABLE tb1 PARTITION (b='1' ) COMPUTE STATISTICS");
-    statement.close();
-    connection.close();
-  }
-
-  /* Operations which require select on table alone
-  1. HiveOperation.QUERY
-  2. HiveOperation.SHOW_TBLPROPERTIES
-  3. HiveOperation.SHOW_CREATETABLE
-  4. HiveOperation.SHOWINDEXES
-  5. HiveOperation.SHOWCOLUMNS
-  6. Describe tb1 : HiveOperation.DESCTABLE5.
-  7. HiveOperation.SHOWPARTITIONS
-  8. TODO: show functions?
-  9. HiveOperation.SHOW_TABLESTATUS
-   */
-  @Test
-  public void testSelectOnTable() throws Exception {
-    adminCreate(DB1, tableName, true);
-    adminCreatePartition();
-    policyFile
-        .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
-        .addRolesToGroup(USERGROUP1, "select_db1_tb1");
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("select * from tb1");
-
-    statement.executeQuery("SHOW Partitions tb1");
-    statement.executeQuery("SHOW TBLPROPERTIES tb1");
-    statement.executeQuery("SHOW CREATE TABLE tb1");
-    statement.executeQuery("SHOW indexes on tb1");
-    statement.executeQuery("SHOW COLUMNS from tb1");
-    statement.executeQuery("SHOW functions '.*'");
-    statement.executeQuery("SHOW TABLE EXTENDED IN " + DB1 + " LIKE 'tb*'");
-
-    statement.executeQuery("DESCRIBE tb1");
-    statement.executeQuery("DESCRIBE tb1 PARTITION (b=1)");
-
-    statement.close();
-    connection.close();
-
-    //Negative case
-    adminCreate(DB2, tableName);
-    policyFile
-        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
-        .addRolesToGroup(USERGROUP3, "insert_db1_tb1");
-    writePolicyFile(policyFile);
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    context.assertSentrySemanticException(statement, "select * from tb1", semanticException);
-    context.assertSentrySemanticException(statement,
-        "SHOW TABLE EXTENDED IN " + DB2 + " LIKE 'tb*'", semanticException);
-
-    statement.close();
-    connection.close();
-
-
-  }
-
-  /* Operations which require insert on table alone
-  1. HiveOperation.SHOW_TBLPROPERTIES
-  2. HiveOperation.SHOW_CREATETABLE
-  3. HiveOperation.SHOWINDEXES
-  4. HiveOperation.SHOWCOLUMNS
-  5. HiveOperation.DESCTABLE
-  6. HiveOperation.SHOWPARTITIONS
-  7. TODO: show functions?
-  8. TODO: lock, unlock, Show locks
-  9. HiveOperation.SHOW_TABLESTATUS
-   */
-  @Test
-  public void testInsertOnTable() throws Exception {
-    adminCreate(DB1, tableName, true);
-    adminCreatePartition();
-    policyFile
-        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
-        .addRolesToGroup(USERGROUP1, "insert_db1_tb1");
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    /*statement.execute("LOCK TABLE tb1 EXCLUSIVE");
-    statement.execute("UNLOCK TABLE tb1");
-    */
-    statement.executeQuery("SHOW TBLPROPERTIES tb1");
-    statement.executeQuery("SHOW CREATE TABLE tb1");
-    statement.executeQuery("SHOW indexes on tb1");
-    statement.executeQuery("SHOW COLUMNS from tb1");
-    statement.executeQuery("SHOW functions '.*'");
-    //statement.executeQuery("SHOW LOCKS tb1");
-    statement.executeQuery("SHOW TABLE EXTENDED IN " + DB1 + " LIKE 'tb*'");
-
-    //NoViableAltException
-    //statement.executeQuery("SHOW transactions");
-    //statement.executeQuery("SHOW compactions");
-    statement.executeQuery("DESCRIBE tb1");
-    statement.executeQuery("DESCRIBE tb1 PARTITION (b=1)");
-    statement.executeQuery("SHOW Partitions tb1");
-
-
-    statement.close();
-    connection.close();
-  }
-
-  /* Test all operations that require alter on table
-  1. HiveOperation.ALTERTABLE_PROPERTIES
-  2. HiveOperation.ALTERTABLE_SERDEPROPERTIES
-  3. HiveOperation.ALTERTABLE_CLUSTER_SORT
-  4. HiveOperation.ALTERTABLE_TOUCH
-  5. HiveOperation.ALTERTABLE_PROTECTMODE
-  6. HiveOperation.ALTERTABLE_FILEFORMAT
-  7. HiveOperation.ALTERTABLE_RENAMEPART
-  8. HiveOperation.ALTERPARTITION_SERDEPROPERTIES
-  9. TODO: archive partition
-  10. TODO: unarchive partition
-  11. HiveOperation.ALTERPARTITION_FILEFORMAT
-  12. TODO: partition touch (is it same as  HiveOperation.ALTERTABLE_TOUCH?)
-  13. HiveOperation.ALTERPARTITION_PROTECTMODE
-  14. HiveOperation.ALTERTABLE_RENAMECOL
-  15. HiveOperation.ALTERTABLE_ADDCOLS
-  16. HiveOperation.ALTERTABLE_REPLACECOLS
-  17. TODO: HiveOperation.ALTERVIEW_PROPERTIES
-  18. TODO: HiveOperation.ALTERTABLE_SERIALIZER
-  19. TODO: HiveOperation.ALTERPARTITION_SERIALIZER
-   */
-  @Test
-  public void testAlterTable() throws Exception {
-    adminCreate(DB1, tableName, true);
-
-    Connection connection;
-    Statement statement;
-    //Setup
-    connection = context.createConnection(ADMIN1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '10') ");
-    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '1') ");
-    statement.execute("DROP TABLE IF EXISTS ptab");
-    statement.execute("CREATE TABLE ptab (a int) STORED AS PARQUET");
-
-    policyFile
-      .addPermissionsToRole("alter_db1_tb1", privileges.get("alter_db1_tb1"))
-      .addPermissionsToRole("alter_db1_ptab", privileges.get("alter_db1_ptab"))
-      .addRolesToGroup(USERGROUP1, "alter_db1_tb1", "alter_db1_ptab")
-      .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
-      .addRolesToGroup(USERGROUP2, "insert_db1_tb1");
-    writePolicyFile(policyFile);
-
-    //Negative test cases
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    assertSemanticException(statement, "ALTER TABLE tb1 SET TBLPROPERTIES ('comment' = 'new_comment')");
-    assertSemanticException(statement, "ALTER TABLE tb1 SET SERDEPROPERTIES ('field.delim' = ',')");
-    assertSemanticException(statement, "ALTER TABLE tb1 CLUSTERED BY (a) SORTED BY (a) INTO 1 BUCKETS");
-    assertSemanticException(statement, "ALTER TABLE tb1 TOUCH");
-    assertSemanticException(statement, "ALTER TABLE tb1 ENABLE NO_DROP");
-    assertSemanticException(statement, "ALTER TABLE tb1 DISABLE OFFLINE");
-    assertSemanticException(statement, "ALTER TABLE tb1 SET FILEFORMAT RCFILE");
-
-    assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) RENAME TO PARTITION (b = 2)");
-    assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) SET SERDEPROPERTIES ('field.delim' = ',')");
-    //assertSemanticException(statement, "ALTER TABLE tb1 ARCHIVE PARTITION (b = 2)");
-    //assertSemanticException(statement, "ALTER TABLE tb1 UNARCHIVE PARTITION (b = 2)");
-    assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) SET FILEFORMAT RCFILE");
-    assertSemanticException(statement, "ALTER TABLE tb1 TOUCH PARTITION (b = 10)");
-    assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) DISABLE NO_DROP");
-    assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) DISABLE OFFLINE");
-
-    assertSemanticException(statement, "ALTER TABLE tb1 CHANGE COLUMN a c int");
-    assertSemanticException(statement, "ALTER TABLE tb1 ADD COLUMNS (a int)");
-    assertSemanticException(statement, "ALTER TABLE ptab REPLACE COLUMNS (a int, c int)");
-    assertSemanticException(statement, "MSCK REPAIR TABLE tb1");
-
-    //assertSemanticException(statement, "ALTER VIEW view1 SET TBLPROPERTIES ('comment' = 'new_comment')");
-
-
-    statement.close();
-    connection.close();
-
-    //Positive cases
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("ALTER TABLE tb1 SET TBLPROPERTIES ('comment' = 'new_comment')");
-    statement.execute("ALTER TABLE tb1 SET SERDEPROPERTIES ('field.delim' = ',')");
-    statement.execute("ALTER TABLE tb1 CLUSTERED BY (a) SORTED BY (a) INTO 1 BUCKETS");
-    statement.execute("ALTER TABLE tb1 TOUCH");
-    statement.execute("ALTER TABLE tb1 ENABLE NO_DROP");
-    statement.execute("ALTER TABLE tb1 DISABLE OFFLINE");
-    statement.execute("ALTER TABLE tb1 SET FILEFORMAT RCFILE");
-
-    statement.execute("ALTER TABLE tb1 PARTITION (b = 1) RENAME TO PARTITION (b = 2)");
-    statement.execute("ALTER TABLE tb1 PARTITION (b = 2) SET SERDEPROPERTIES ('field.delim' = ',')");
-    //statement.execute("ALTER TABLE tb1 ARCHIVE PARTITION (b = 2)");
-    //statement.execute("ALTER TABLE tb1 UNARCHIVE PARTITION (b = 2)");
-    statement.execute("ALTER TABLE tb1 PARTITION (b = 2) SET FILEFORMAT RCFILE");
-    statement.execute("ALTER TABLE tb1 TOUCH PARTITION (b = 2)");
-    statement.execute("ALTER TABLE tb1 PARTITION (b = 2) DISABLE NO_DROP");
-    statement.execute("ALTER TABLE tb1 PARTITION (b = 2) DISABLE OFFLINE");
-
-    statement.execute("ALTER TABLE tb1 CHANGE COLUMN a c int");
-    statement.execute("ALTER TABLE tb1 ADD COLUMNS (a int)");
-    statement.execute("ALTER TABLE ptab REPLACE COLUMNS (a int, c int)");
-    statement.execute("MSCK REPAIR TABLE tb1");
-
-    //statement.execute("ALTER VIEW view1 SET TBLPROPERTIES ('comment' = 'new_comment')");
-
-    statement.close();
-    connection.close();
-  }
-
-  /* Test all operations that require index on table alone
-  1. Create index : HiveOperation.CREATEINDEX
-  2. Drop index : HiveOperation.DROPINDEX
-  3. HiveOperation.ALTERINDEX_REBUILD
-  4. TODO: HiveOperation.ALTERINDEX_PROPS
-  */
-  @Test
-  public void testIndexTable() throws Exception {
-    adminCreate(DB1, tableName, true);
-    policyFile
-        .addPermissionsToRole("index_db1_tb1", privileges.get("index_db1_tb1"))
-        .addRolesToGroup(USERGROUP1, "index_db1_tb1")
-        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
-        .addRolesToGroup(USERGROUP2, "insert_db1_tb1");
-    writePolicyFile(policyFile);
-
-    Connection connection;
-    Statement statement;
-
-    //Positive cases
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("CREATE INDEX table01_index ON TABLE tb1 (a) AS 'COMPACT' WITH DEFERRED REBUILD");
-    statement.execute("ALTER INDEX table01_index ON tb1 REBUILD");
-    statement.close();
-    connection.close();
-
-    //Negative case
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    assertSemanticException(statement, "CREATE INDEX table02_index ON TABLE tb1 (a) AS 'COMPACT' WITH DEFERRED REBUILD");
-    assertSemanticException(statement, "ALTER INDEX table01_index ON tb1 REBUILD");
-    assertSemanticException(statement, "DROP INDEX table01_index ON tb1");
-    statement.close();
-    connection.close();
-
-    //Positive cases
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("DROP INDEX table01_index ON tb1");
-    statement.close();
-    connection.close();
-  }
-
-  /* Test all operations that require drop on table alone
-  1. Create index : HiveOperation.DROPTABLE
-  */
-  @Test
-  public void testDropTable() throws Exception {
-    adminCreate(DB1, tableName, true);
-    policyFile
-        .addPermissionsToRole("drop_db1_tb1", privileges.get("drop_db1_tb1"))
-        .addRolesToGroup(USERGROUP1, "drop_db1_tb1")
-        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
-        .addRolesToGroup(USERGROUP2, "insert_db1_tb1");
-    writePolicyFile(policyFile);
-
-    Connection connection;
-    Statement statement;
-
-    //Negative case
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    assertSemanticException(statement, "drop table " + tableName);
-
-    statement.close();
-    connection.close();
-
-    //Positive cases
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("drop table " + tableName);
-
-    statement.close();
-    connection.close();
-  }
-
-  @Ignore
-  @Test
-  public void testLockTable() throws Exception {
-   //TODO
-  }
-
-  /* Operations that require alter + drop on table
-    1. HiveOperation.ALTERTABLE_DROPPARTS
-  */
-  @Test
-  public void dropPartition() throws Exception {
-    adminCreate(DB1, tableName, true);
-    policyFile
-        .addPermissionsToRole("alter_db1_tb1", privileges.get("alter_db1_tb1"))
-        .addPermissionsToRole("drop_db1_tb1", privileges.get("drop_db1_tb1"))
-        .addRolesToGroup(USERGROUP1, "alter_db1_tb1", "drop_db1_tb1")
-        .addRolesToGroup(USERGROUP2, "alter_db1_tb1");
-
-    writePolicyFile(policyFile);
-
-    Connection connection;
-    Statement statement;
-    //Setup
-    connection = context.createConnection(ADMIN1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '10') ");
-
-    //Negative case
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("USE " + DB1);
-    assertSemanticException(statement, "ALTER TABLE tb1 DROP PARTITION (b = 10)");
-
-    //Positive case
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("ALTER TABLE tb1 DROP PARTITION (b = 10)");
-    statement.close();
-    connection.close();
-  }
-
-  /*
-   1. HiveOperation.ALTERTABLE_RENAME
-   */
-  @Test
-  public void renameTable() throws Exception {
-    adminCreate(DB1, tableName);
-    policyFile
-        .addPermissionsToRole("alter_db1_tb1", privileges.get("alter_db1_tb1"))
-        .addPermissionsToRole("create_db1", privileges.get("create_db1"))
-        .addRolesToGroup(USERGROUP1, "alter_db1_tb1", "create_db1")
-        .addRolesToGroup(USERGROUP2, "create_db1")
-        .addRolesToGroup(USERGROUP3, "alter_db1_tb1");
-
-    writePolicyFile(policyFile);
-
-    Connection connection;
-    Statement statement;
-
-    //Negative cases
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    assertSemanticException(statement, "ALTER TABLE tb1 RENAME TO tb2");
-    statement.close();
-    connection.close();
-
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    assertSemanticException(statement, "ALTER TABLE tb1 RENAME TO tb2");
-    statement.close();
-    connection.close();
-
-    //Positive case
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("ALTER TABLE tb1 RENAME TO tb2");
-    statement.close();
-    connection.close();
-  }
-
-  /* Test all operations which require alter on table (+ all on URI)
-   1. HiveOperation.ALTERTABLE_LOCATION
-   2. HiveOperation.ALTERTABLE_ADDPARTS
-   3. TODO: HiveOperation.ALTERPARTITION_LOCATION
-   4. TODO: HiveOperation.ALTERTBLPART_SKEWED_LOCATION
-   */
-  @Test
-  public void testAlterOnTableAndURI() throws Exception {
-    adminCreate(DB1, tableName, true);
-    String tabLocation = dfs.getBaseDir() + "/" + Math.random();
-    policyFile
-        .addPermissionsToRole("alter_db1_tb1", privileges.get("alter_db1_tb1"))
-        .addPermissionsToRole("all_uri", "server=server1->uri=" + tabLocation)
-        .addRolesToGroup(USERGROUP1, "alter_db1_tb1", "all_uri")
-        .addRolesToGroup(USERGROUP2, "alter_db1_tb1");
-
-    writePolicyFile(policyFile);
-
-    //Case without the URI privilege
-    Connection connection = context.createConnection(USER2_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("USE " + DB1);
-    assertSemanticException(statement, "ALTER TABLE tb1 SET LOCATION '" + tabLocation + "'");
-    assertSemanticException(statement, "ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '3') LOCATION '" + tabLocation + "/part'");
-    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '1') ");
-
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("ALTER TABLE tb1 SET LOCATION '" + tabLocation + "'");
-    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '3') LOCATION '" + tabLocation + "/part'");
-    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '10') ");
-    statement.close();
-    connection.close();
-
-    //Negative case: User2_1 has ALTER on the table but not the URI privilege
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    context.assertSentrySemanticException(statement, "ALTER TABLE tb1 SET LOCATION '" + tabLocation + "'",
-        semanticException);
-    context.assertSentrySemanticException(statement,
-        "ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '3') LOCATION '" + tabLocation + "/part'",
-        semanticException);
-    statement.close();
-    connection.close();
-
-    //Negative case: User3_1 has only insert privileges on table
-    policyFile
-        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
-        .addRolesToGroup(USERGROUP3, "insert_db1_tb1", "all_uri");
-    writePolicyFile(policyFile);
-
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    assertSemanticException(statement, "ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '2') ");
-    assertSemanticException(statement, "ALTER TABLE tb1 SET LOCATION '" + tabLocation + "'");
-
-    assertSemanticException(statement, "ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '3') LOCATION '"
-        + tabLocation + "/part'");
-    statement.close();
-    connection.close();
-
-
-  }
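The test above mixes three users, which can obscure the single rule it checks: a plain ADD PARTITION needs only ALTER on the table, while any variant that names a path (SET LOCATION, ADD PARTITION ... LOCATION) also needs the URI grant. A hypothetical helper, not part of the harness, that isolates that gate using only calls already present in this class:

    // Hypothetical helper: run the same location-bearing ALTER as different users and assert
    // that the URI grant is what flips the outcome. Assumes the test class fields (context,
    // DB1) and a location already covered by the "all_uri" role.
    private void assertUriGateOnAlter(String user, boolean hasUriGrant, String tabLocation)
        throws Exception {
      Connection connection = context.createConnection(user);
      Statement statement = context.createStatement(connection);
      statement.execute("Use " + DB1);
      String alterSql = "ALTER TABLE tb1 SET LOCATION '" + tabLocation + "'";
      if (hasUriGrant) {
        statement.execute(alterSql);                  // ALTER on table + all on URI => allowed
      } else {
        assertSemanticException(statement, alterSql); // ALTER on table only => rejected
      }
      statement.close();
      connection.close();
    }

Called as assertUriGateOnAlter(USER1_1, true, tabLocation) and assertUriGateOnAlter(USER2_1, false, tabLocation), it would reproduce the positive leg and the first negative leg above.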
-
-  /* Create on Database and select on table
-  1. Create view :  HiveOperation.CREATEVIEW
-   */
-  @Test
-  public void testCreateView() throws Exception {
-    adminCreate(DB1, tableName);
-    adminCreate(DB2, null);
-    policyFile
-        .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
-        .addPermissionsToRole("create_db2", privileges.get("create_db2"))
-        .addRolesToGroup(USERGROUP1, "select_db1_tb1", "create_db2");
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("use " + DB2);
-    statement.execute("create view view1 as select a from " + DB1 + ".tb1");
-    statement.close();
-    connection.close();
-
-    //Negative case
-    policyFile
-        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
-        .addRolesToGroup(USERGROUP3, "insert_db1_tb1", "create_db2");
-    writePolicyFile(policyFile);
-
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB2);
-    context.assertSentrySemanticException(statement, "create view view1 as select a from " + DB1 + ".tb1",
-        semanticException);
-    statement.close();
-    connection.close();
-
-
-  }
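CREATEVIEW combines a database-level and a table-level check: CREATE on the database the view lands in, plus SELECT on every table the view reads. A minimal sketch of the two grants, again with assumed privilege strings (the real ones sit behind privileges.get(...) in the shared setup):

    // Illustrative expansion only:
    policyFile
        .addPermissionsToRole("select_db1_tb1",
            "server=server1->db=" + DB1 + "->table=tb1->action=select")
        .addPermissionsToRole("create_db2",
            "server=server1->db=" + DB2 + "->action=create")
        .addRolesToGroup(USERGROUP1, "select_db1_tb1", "create_db2");
    writePolicyFile(policyFile);
    // "create view view1 as select a from <DB1>.tb1" then succeeds for USERGROUP1; the
    // insert-only grant held by USERGROUP3 does not imply select, hence the negative case.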
-
-  /*
-   1. HiveOperation.IMPORT : Create on db + all on URI
-   2. HiveOperation.EXPORT : SELECT on table + all on URI
-   */
-
-  @Test
-  public void testExportImport() throws Exception {
-    File dataFile;
-    dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME);
-    FileOutputStream to = new FileOutputStream(dataFile);
-    Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to);
-    to.close();
-
-    dropDb(ADMIN1, DB1);
-    createDb(ADMIN1, DB1);
-    createTable(ADMIN1, DB1, dataFile, tableName);
-    String location = dfs.getBaseDir() + "/" + Math.random();
-    policyFile
-        .addPermissionsToRole("create_db1", privileges.get("create_db1"))
-        .addPermissionsToRole("all_uri", "server=server1->uri="+ location)
-        .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
-        .addPermissionsToRole("insert_db1", privileges.get("insert_db1"))
-        .addRolesToGroup(USERGROUP1, "select_db1_tb1", "all_uri")
-        .addRolesToGroup(USERGROUP2, "create_db1", "all_uri")
-        .addRolesToGroup(USERGROUP3, "insert_db1", "all_uri");
-    writePolicyFile(policyFile);
-    Connection connection;
-    Statement statement;
-
-    //Negative case
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    context.assertSentrySemanticException(statement, "export table tb1 to '" + location + "'",
-        semanticException);
-    statement.close();
-    connection.close();
-
-    //Positive
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("export table tb1 to '" + location + "'" );
-    statement.close();
-    connection.close();
-
-    //Negative
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    context.assertSentrySemanticException(statement, "import table tb2 from '" + location + "'",
-        semanticException);
-    statement.close();
-    connection.close();
-
-    //Positive
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("import table tb2 from '" + location + "'");
-    statement.close();
-    connection.close();
-
-  }
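Export and import split the same URI grant across two different object-level privileges: EXPORT pairs SELECT on the table with all on the target URI, while IMPORT pairs CREATE on the database with all on the source URI. A hypothetical round-trip helper, not in the harness, sketching the two positive legs of the test above in one place:

    // Hypothetical helper: export as the select-privileged user, then import the copy as the
    // create-privileged user; both users must also hold the URI grant on 'location'.
    private void exportImportRoundTrip(String exportUser, String importUser, String location)
        throws Exception {
      Connection connection = context.createConnection(exportUser);
      Statement statement = context.createStatement(connection);
      statement.execute("Use " + DB1);
      statement.execute("export table tb1 to '" + location + "'");
      statement.close();
      connection.close();

      connection = context.createConnection(importUser);
      statement = context.createStatement(connection);
      statement.execute("Use " + DB1);
      statement.execute("import table tb2 from '" + location + "'");
      statement.close();
      connection.close();
    }

exportImportRoundTrip(USER1_1, USER2_1, location) would mirror the two positive cases asserted above.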
-
-  /*
-  1. HiveOperation.LOAD: INSERT on table + all on uri
-   */
-  @Test
-  public void testLoad() throws Exception {
-    File dataFile;
-    dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME);
-    FileOutputStream to = new FileOutputStream(dataFile);
-    Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to);
-    to.close();
-
-    adminCreate(DB1, tableName);
-
-    policyFile
-        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
-        .addPermissionsToRole("all_uri", "server=server1->uri=file://" + dataDir)
-        .addRolesToGroup(USERGROUP1, "insert_db1_tb1", "all_uri");
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("load data local inpath '" + dataFile.getPath() + "' into table tb1" );
-    statement.close();
-    connection.close();
-  }
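LOAD is the simplest of the URI-bearing operations: INSERT on the target table plus all on the URI the data comes from, with the URI grant above spelled with the file:// scheme because the test loads a local file. The test only exercises the positive path; a hypothetical negative leg, appended inside testLoad and reusing its dataFile and policyFile, would look like this:

    // Hypothetical negative leg (not asserted in the test above): a group holding only the
    // insert role, with no URI grant on the local file, should be rejected for the same LOAD.
    policyFile.addRolesToGroup(USERGROUP2, "insert_db1_tb1");
    writePolicyFile(policyFile);
    Connection connection = context.createConnection(USER2_1);
    Statement statement = context.createStatement(connection);
    statement.execute("Use " + DB1);
    assertSemanticException(statement,
        "load data local inpath '" + dataFile.getPath() + "' into table tb1");
    statement.close();
    connection.close();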
-
-  /*
-  1. HiveOperation.CREATETABLE_AS_SELECT : Create on db + select on table
-   */
-  @Test
-  public void testCTAS() throws Exception {
-    adminCreate(DB1, tableName);
-    adminCreate(DB2, null);
-
-    String location = dfs.getBaseDir() + "/" + Math.random();
-
-    Connection connection = context.createConnection(ADMIN1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("create view view1 as select a from " + DB1 + ".tb1");
-    statement.close();
-    connection.close();
-
-    policyFile
-      .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
-      .addPermissionsToRole("select_db1_view1", privileges.get("select_db1_view1"))
-      .addPermissionsToRole("create_db2", privileges.get("create_db2"))
-      .addPermissionsToRole("all_uri", "server=server1->uri=" + location)
-      .addRolesToGroup(USERGROUP1, "select_db1_tb1", "create_db2")
-      .addRolesToGroup(USERGROUP2, "select_db1_view1", "create_db2")
-      .addRolesToGroup(USERGROUP3, "select_db1_tb1", "create_db2,all_uri");
-    writePolicyFile(policyFile);
-
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB2);
-    statement.execute("create table tb2 as select a from " + DB1 + ".tb1");
-    //Ensure CTAS fails without URI
-    context.assertSentrySemanticException(statement, "create table tb3 location '" + location +
-        "' as select a from " + DB1 + ".tb1",
-      semanticException);
-    context.assertSentrySemanticException(statement, "create table tb3 as select a from " + DB1 + ".view1",
-      semanticException);
-
-
-    statement.close();
-    connection.close();
-
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB2);
-    statement.execute("create table tb3 as select a from " + DB1 + ".view1" );
-    context.assertSentrySemanticException(statement, "create table tb4 as select a from " + DB1 + ".tb1",
-      semanticException);
-
-    statement.close();
-    connection.close();
-
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    //CTAS is valid with URI
-    statement.execute("Use " + DB2);
-    statement.execute("create table tb4 location '" + location +
-      "' as select a from " + DB1 + ".tb1");
-
-    statement.close();
-    connection.close();
-
-  }
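The CTAS test packs three rules into one method: the target database needs CREATE, the object actually read needs SELECT (a grant on the underlying table does not cover a view defined over it, and vice versa), and a LOCATION clause additionally drags in the URI grant. A hypothetical helper, not part of the harness, that makes the optional-LOCATION part explicit:

    // Hypothetical helper: issue a CTAS with an optional LOCATION clause; the clause is the
    // only part of the statement that adds a URI check on top of create + select.
    private void createTableAsSelect(String user, String targetTable, String sourceObject,
        String location) throws Exception {
      Connection connection = context.createConnection(user);
      Statement statement = context.createStatement(connection);
      statement.execute("Use " + DB2);
      String locationClause = (location == null) ? "" : " location '" + location + "'";
      statement.execute("create table " + targetTable + locationClause
          + " as select a from " + DB1 + "." + sourceObject);
      statement.close();
      connection.close();
    }

createTableAsSelect(USER1_1, "tb2", "tb1", null) and createTableAsSelect(USER3_1, "tb4", "tb1", location) would reproduce the two positive legs above.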
-
-
-  /*
-  1. INSERT : input: SELECT on the source table; output: INSERT on the target table, or all
-     on the target URI when writing to a directory (see the sketch after this method)
-   */
-  @Test
-  public void testInsert() throws Exception {
-    File dataFile;
-    dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME);
-    FileOutputStream to = new FileOutputStream(dataFile);
-    Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to);
-    to.close();
-
-    dropDb(ADMIN1, DB1);
-    dropDb(ADMIN1, DB2);
-    createDb(ADMIN1, DB1);
-    createDb(ADMIN1, DB2);
-    createTable(ADMIN1, DB1, dataFile, tableName);
-    createTable(ADMIN1, DB2, null, "tb2");
-    String location = dfs.getBaseDir() + "/" + Math.random();
-
-    policyFile
-        .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
-        .addPermissionsToRole("insert_db2_tb2", privileges.get("insert_db2_tb2"))
-        .addRolesToGroup(USERGROUP1, "select_db1_tb1", "insert_db2_tb2")
-        .addPermissionsToRole("all_uri", "server=server1->uri=" + location)
-        .addRolesToGroup(USERGROUP2, "select_db1_tb1", "all_uri");
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    assertSemanticException(statement, "insert overwrite directory '" + location + "' select * from " + DB1 + ".tb1");
-    statement.execute("insert overwrite table " + DB2 + ".tb2 select * from " + DB1 + ".tb1");
-    statement.close();
-    connection.close();
-
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("insert overwrite directory '" + location + "' select * from " + DB1 + ".tb1" );
-    assertSemanticException(statement, "insert overwrite table " + DB2 + ".tb2 select * from " + DB1 + ".tb1");
-    statement.close();
-    connection.close();
-  }
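As the comment above notes, the output side of an INSERT is checked differently depending on the target: a table target needs INSERT on that table, a directory target needs the URI grant instead, and SELECT on the source table is common to both. A short sketch of the assumed grant strings behind the two roles that differ between USERGROUP1 and USERGROUP2, reusing the location variable from the test:

    // Illustrative expansions only -- the real strings live in the shared privileges map:
    //   insert overwrite table <DB2>.tb2 ...    -> "insert_db2_tb2" on the target table
    //   insert overwrite directory '<loc>' ...  -> "all_uri" on <loc> instead
    policyFile
        .addPermissionsToRole("insert_db2_tb2",
            "server=server1->db=" + DB2 + "->table=tb2->action=insert")
        .addPermissionsToRole("all_uri", "server=server1->uri=" + location);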
-
-  @Test
-  public void testFullyQualifiedTableName() throws Exception{
-    Connection connection;
-    Statement statement;
-    connection = context.createConnection(ADMIN1);
-    statement = context.createStatement(connection);
-    statement.execute("create database " + DB1);
-    statement.execute("create table " + DB1 + ".tb1(a int)");
-    statement.execute("DROP table " + DB1 + ".tb1");
-    statement.execute("create table " + DB1 + ".tb1(a int)");
-    statement.execute("use " + DB1);
-    statement.execute("drop table tb1");
-  }
-
-  @Test
-  public void testExternalTables() throws Exception{
-    createDb(ADMIN1, DB1);
-    File externalTblDir = new File(dataDir, "exttab");
-    assertTrue("Unable to create directory for external table test" , externalTblDir.mkdir());
-
-    policyFile
-        .addPermissionsToRole("create_db1", privileges.get("create_db1"))
-        .addPermissionsToRole("all_uri", "server=server1->uri=file://" + dataDir.getPath())
-        .addRolesToGroup(USERGROUP1, "create_db1", "all_uri")
-        .addRolesToGroup(USERGROUP2, "create_db1");
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER2_1);
-    Statement statement = context.createStatement(connection);
-    assertSemanticException(statement, "create external table " + DB1 + ".tb1(a int) stored as " +
-        "textfile location 'file:" + externalTblDir.getAbsolutePath() + "'");
-    //Create external table on HDFS
-    assertSemanticException(statement, "create external table " + DB1 + ".tb2(a int) location '/user/hive/warehouse/blah'");
-    statement.close();
-    connection.close();
-
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("create external table " + DB1 + ".tb1(a int) stored as " +
-        "textfile location 'file:" + externalTblDir.getAbsolutePath() + "'");
-    statement.close();
-    connection.close();
-
-
-  }
-
-  @Test
-  public void testCaseSensitivity() throws Exception {
-    Statement statement = null;
-    Connection connection = null;
-    try {
-      createDb(ADMIN1, DB1);
-      String scratchLikeDir = context.getProperty(HiveConf.ConfVars.SCRATCHDIR.varname);
-      LOGGER.info("scratch like dir = " + scratchLikeDir);
-      String extParentDir = scratchLikeDir + "/ABC/hhh";
-      String extTableDir = scratchLikeDir + "/abc/hhh";
-      LOGGER.info("Creating extParentDir = " + extParentDir + ", extTableDir = " + extTableDir);
-      dfs.assertCreateDir(extParentDir);
-      dfs.assertCreateDir(extTableDir);
-
-      if (! (extParentDir.toLowerCase().startsWith("hdfs://")
-          || extParentDir.toLowerCase().startsWith("s3://")
-          || extParentDir.contains("://"))) {
-        String scheme = fileSystem.getUri().toString();
-        LOGGER.info("scheme = " + scheme);
-        extParentDir = scheme + extParentDir;
-        extTableDir = scheme + extTableDir;
-        LOGGER.info("Add scheme in extParentDir = " + extParentDir + ", extTableDir = " + extTableDir);
-      }
-
-      policyFile
-          .addPermissionsToRole("all_db1", privileges.get("all_db1"))
-          .addPermissionsToRole("all_uri", "server=server1->uri=" + extParentDir)
-          .addRolesToGroup(USERGROUP1, "all_db1", "all_uri");
-      writePolicyFile(policyFile);
-      connection = context.createConnection(USER1_1);
-      statement = context.createStatement(connection);
-      assertSemanticException(statement,
-          "create external table " + DB1 + ".tb1(a int) location '" + extTableDir + "'");
-    } finally {
-      if (statement != null) {
-        statement.close();
-      }
-      if (connection != null) {
-        connection.close();
-      }
-    }
-  }
-}