Posted to commits@sentry.apache.org by an...@apache.org on 2016/04/26 05:47:13 UTC

[1/3] sentry git commit: SENTRY-1216: disable sentry ha tests for now; add a timeout for each test class/method; fix transient JUnit timeout issue. (Anne Yu, reviewed by HaoHao). [Forced Update!]

Repository: sentry
Updated Branches:
  refs/heads/master 4933e0cdd -> 66b32afa8 (forced update)
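
The commit subject says it adds a timeout for each test class/method to work around transient
JUnit timeouts; those changes are not shown in this part of the diff. As a rough sketch only
(class name and durations below are assumptions, not taken from this commit), a per-class and
per-method timeout for JUnit 4 test classes like the ones in this patch is typically wired up
with Timeout rules:

    import org.junit.ClassRule;
    import org.junit.Rule;
    import org.junit.Test;
    import org.junit.rules.Timeout;

    public class ExampleTimeoutSketch {
      // Limits the total running time of all test methods in this class.
      @ClassRule
      public static final Timeout classTimeout = new Timeout(40 * 60 * 1000);

      // Fails any single test method that runs longer than this limit.
      @Rule
      public final Timeout methodTimeout = new Timeout(10 * 60 * 1000);

      @Test
      public void exampleTest() {
        // a real test body would go here
      }
    }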


http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtTableScopePart1.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtTableScopePart1.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtTableScopePart1.java
new file mode 100644
index 0000000..5e8ed79
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtTableScopePart1.java
@@ -0,0 +1,406 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.tests.e2e.hive;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+
+import org.junit.Assert;
+
+import org.apache.sentry.provider.file.PolicyFile;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.google.common.io.Resources;
+
+/**
+ * Tests privileges at table scope within a single database.
+ */
+public class TestPrivilegesAtTableScopePart1 extends AbstractTestWithStaticConfiguration {
+
+  private static PolicyFile policyFile;
+  private final static String MULTI_TYPE_DATA_FILE_NAME = "emp.dat";
+
+  @Before
+  public void setup() throws Exception {
+    policyFile = super.setupPolicy();
+    super.setup();
+    prepareDBDataForTest();
+  }
+
+  @BeforeClass
+  public static void setupTestStaticConfiguration() throws Exception {
+    AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
+  }
+
+  protected static void prepareDBDataForTest() throws Exception {
+    // copy data file to test dir
+    File dataDir = context.getDataDir();
+    File dataFile = new File(dataDir, MULTI_TYPE_DATA_FILE_NAME);
+    FileOutputStream to = new FileOutputStream(dataFile);
+    Resources.copy(Resources.getResource(MULTI_TYPE_DATA_FILE_NAME), to);
+    to.close();
+
+    // setup db objects needed by the test
+    Connection connection = context.createConnection(ADMIN1);
+    Statement statement = context.createStatement(connection);
+
+    statement.execute("DROP DATABASE IF EXISTS DB_1 CASCADE");
+    statement.execute("CREATE DATABASE DB_1");
+    statement.execute("USE DB_1");
+
+    statement.execute("CREATE TABLE " + TBL1 + "(B INT, A STRING) "
+        + " row format delimited fields terminated by '|'  stored as textfile");
+    statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath() + "' INTO TABLE " + TBL1);
+    statement.execute("CREATE TABLE " + TBL2 + "(B INT, A STRING) "
+        + " row format delimited fields terminated by '|'  stored as textfile");
+    statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath() + "' INTO TABLE " + TBL2);
+    statement.execute("CREATE VIEW VIEW_1 AS SELECT A, B FROM " + TBL1);
+
+    statement.close();
+    connection.close();
+  }
+
+  /*
+   * Admin creates database DB_1 and tables TBL1, TBL2 in DB_1, then loads data
+   * into TBL1, TBL2. Admin grants SELECT on TBL1, TBL2 and INSERT on TBL1 to
+   * USER_GROUP, of which user1 is a member.
+   */
+  @Test
+  public void testInsertAndSelect() throws Exception {
+    policyFile
+        .addRolesToGroup(USERGROUP1, "select_tab1", "insert_tab1", "select_tab2")
+        .addPermissionsToRole("select_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=select")
+        .addPermissionsToRole("insert_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=insert")
+        .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL2 + "->action=select")
+        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
+    writePolicyFile(policyFile);
+
+    // test execution
+    Connection connection = context.createConnection(USER1_1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("USE DB_1");
+    // test user can insert
+    statement.execute("INSERT INTO TABLE " + TBL1 + " SELECT A, B FROM " + TBL2);
+    // test user can query table
+    statement.executeQuery("SELECT A FROM " + TBL2);
+    // negative test: test user can't drop
+    try {
+      statement.execute("DROP TABLE " + TBL1);
+      Assert.fail("Expected SQL exception");
+    } catch (SQLException e) {
+      context.verifyAuthzException(e);
+    }
+    statement.close();
+    connection.close();
+
+    // connect as admin and drop TBL1
+    connection = context.createConnection(ADMIN1);
+    statement = context.createStatement(connection);
+    statement.execute("USE DB_1");
+    statement.execute("DROP TABLE " + TBL1);
+    statement.close();
+    connection.close();
+
+    // negative test: connect as user1 and try to recreate TBL1
+    connection = context.createConnection(USER1_1);
+    statement = context.createStatement(connection);
+    statement.execute("USE DB_1");
+    try {
+      statement.execute("CREATE TABLE " + TBL1 + "(A STRING)");
+      Assert.fail("Expected SQL exception");
+    } catch (SQLException e) {
+      context.verifyAuthzException(e);
+    }
+
+    statement.close();
+    connection.close();
+
+    // connect as admin to restore the TBL1
+    connection = context.createConnection(ADMIN1);
+    statement = context.createStatement(connection);
+    statement.execute("USE DB_1");
+    statement.execute("CREATE TABLE " + TBL1 + "(B INT, A STRING) "
+        + " row format delimited fields terminated by '|'  stored as textfile");
+    statement.execute("INSERT INTO TABLE " + TBL1 + " SELECT A, B FROM " + TBL2);
+    statement.close();
+    connection.close();
+
+  }
+
+  /*
+   * Admin creates database DB_1, table TBL1, TBL2 in DB_1, loads data into
+   * TBL1, TBL2. Admin grants INSERT on TBL1, SELECT on TBL2 to USER_GROUP
+   * of which user1 is a member.
+   */
+  @Test
+  public void testInsert() throws Exception {
+    policyFile
+        .addRolesToGroup(USERGROUP1, "insert_tab1", "select_tab2")
+        .addPermissionsToRole("insert_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=insert")
+        .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL2 + "->action=select")
+        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
+    writePolicyFile(policyFile);
+
+    // test execution
+    Connection connection = context.createConnection(USER1_1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("USE " + DB1);
+    // test user can execute insert on table
+    statement.execute("INSERT INTO TABLE " + TBL1 + " SELECT A, B FROM " + TBL2);
+
+    // negative test: user can't query table
+    try {
+      statement.executeQuery("SELECT A FROM " + TBL1);
+      Assert.fail("Expected SQL exception");
+    } catch (SQLException e) {
+      context.verifyAuthzException(e);
+    }
+
+    // negative test: test user can't query view
+    try {
+      statement.executeQuery("SELECT A FROM VIEW_1");
+      Assert.fail("Expected SQL exception");
+    } catch (SQLException e) {
+      context.verifyAuthzException(e);
+    }
+
+    // negative test case: show tables shouldn't list VIEW_1
+    ResultSet resultSet = statement.executeQuery("SHOW TABLES");
+    while (resultSet.next()) {
+      String tableName = resultSet.getString(1);
+      assertNotNull("table name is null in result set", tableName);
+      assertFalse("Found VIEW_1 in the result set",
+          "VIEW_1".equalsIgnoreCase(tableName));
+    }
+
+    // negative test: test user can't create a new view
+    try {
+      statement.executeQuery("CREATE VIEW VIEW_2(A) AS SELECT A FROM " + TBL1);
+      Assert.fail("Expected SQL Exception");
+    } catch (SQLException e) {
+      context.verifyAuthzException(e);
+    }
+    statement.close();
+    connection.close();
+  }
+
+  /*
+   * Admin creates database DB_1, table TBL1, TBL2 in DB_1, loads data into
+   * TBL1, TBL2. Admin grants SELECT on TBL1, TBL2 to USER_GROUP of which
+   * user1 is a member.
+   */
+  @Test
+  public void testSelect() throws Exception {
+    policyFile
+        .addRolesToGroup(USERGROUP1, "select_tab1", "select_tab2")
+        .addPermissionsToRole("select_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=select")
+        .addPermissionsToRole("insert_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=insert")
+        .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL2 + "->action=select")
+        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
+    writePolicyFile(policyFile);
+
+    // test execution
+    Connection connection = context.createConnection(USER1_1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("USE " + DB1);
+    // test user can execute query on table
+    statement.executeQuery("SELECT A FROM " + TBL1);
+
+    // negative test: test insert into table
+    try {
+      statement.executeQuery("INSERT INTO TABLE " + TBL1 + " SELECT A, B FROM " + TBL2);
+      Assert.fail("Expected SQL exception");
+    } catch (SQLException e) {
+      context.verifyAuthzException(e);
+    }
+
+    // negative test: test user can't query view
+    try {
+      statement.executeQuery("SELECT A FROM VIEW_1");
+      Assert.fail("Expected SQL exception");
+    } catch (SQLException e) {
+      context.verifyAuthzException(e);
+    }
+
+    // negative test: test user can't create a new view
+    try {
+      statement.executeQuery("CREATE VIEW VIEW_2(A) AS SELECT A FROM " + TBL1);
+      Assert.fail("Expected SQL Exception");
+    } catch (SQLException e) {
+      context.verifyAuthzException(e);
+    }
+    statement.close();
+    connection.close();
+  }
+
+  /*
+   * Admin creates database DB_1, tables TBL1, TBL2 in DB_1, and VIEW_1 on TBL1,
+   * then loads data into TBL1, TBL2. Admin grants SELECT on TBL1, TBL2 to
+   * USER_GROUP, of which user1 is a member.
+   */
+  @Test
+  public void testTableViewJoin() throws Exception {
+    policyFile
+        .addRolesToGroup(USERGROUP1, "select_tab1", "select_tab2")
+        .addPermissionsToRole("select_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=select")
+        .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL2 + "->action=select")
+        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
+    writePolicyFile(policyFile);
+
+    // test execution
+    Connection connection = context.createConnection(USER1_1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("USE " + DB1);
+    // test user can execute query TBL1 JOIN TBL2
+    statement.executeQuery("SELECT T1.B FROM " + TBL1 + " T1 JOIN " + TBL2 + " T2 ON (T1.B = T2.B)");
+
+    // negative test: test user can't execute query VIEW_1 JOIN TBL2
+    try {
+      statement.executeQuery("SELECT V1.B FROM VIEW_1 V1 JOIN " + TBL2 + " T2 ON (V1.B = T2.B)");
+      Assert.fail("Expected SQL exception");
+    } catch (SQLException e) {
+      context.verifyAuthzException(e);
+    }
+
+    statement.close();
+    connection.close();
+  }
+
+  /*
+   * Admin creates database DB_1, tables TBL1, TBL2 in DB_1, and VIEW_1 on TBL1,
+   * then loads data into TBL1, TBL2. Admin grants SELECT on TBL2 to USER_GROUP,
+   * of which user1 is a member.
+   */
+  @Test
+  public void testTableViewJoin2() throws Exception {
+    policyFile
+        .addRolesToGroup(USERGROUP1, "select_tab2")
+        .addPermissionsToRole("select_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=select")
+        .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL2 + "->action=select")
+        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
+    writePolicyFile(policyFile);
+
+    // test execution
+    Connection connection = context.createConnection(USER1_1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("USE " + DB1);
+    // test user can execute query on TBL2
+    statement.executeQuery("SELECT A FROM " + TBL2);
+
+    // negative test: test user can't execute query VIEW_1 JOIN TBL2
+    try {
+      statement.executeQuery("SELECT VIEW_1.B FROM VIEW_1 JOIN " + TBL2 + " ON (VIEW_1.B = " + TBL2 + ".B)");
+      Assert.fail("Expected SQL exception");
+    } catch (SQLException e) {
+      context.verifyAuthzException(e);
+    }
+
+    // negative test: test user can't execute query TBL1 JOIN TBL2
+    try {
+      statement.executeQuery("SELECT " + TBL1 + ".B FROM " + TBL1 + " JOIN " + TBL2 + " ON (" + TBL1 + ".B = " + TBL2 + ".B)");
+      Assert.fail("Expected SQL exception");
+    } catch (SQLException e) {
+      context.verifyAuthzException(e);
+    }
+
+    statement.close();
+    connection.close();
+  }
+
+  /*
+   * Admin creates database DB_1, tables TBL1, TBL2 in DB_1, and VIEW_1 on TBL1,
+   * then loads data into TBL1, TBL2. Admin grants SELECT on TBL2 and VIEW_1 to
+   * USER_GROUP, of which user1 is a member.
+   */
+  @Test
+  public void testTableViewJoin3() throws Exception {
+    policyFile
+        .addRolesToGroup(USERGROUP1, "select_tab2", "select_view1")
+        .addPermissionsToRole("select_view1", "server=server1->db=DB_1->table=VIEW_1->action=select")
+        .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL2 + "->action=select")
+        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
+    writePolicyFile(policyFile);
+
+    // test execution
+    Connection connection = context.createConnection(USER1_1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("USE " + DB1);
+    // test user can execute query on TBL2
+    statement.executeQuery("SELECT A FROM " + TBL2);
+
+    // test user can execute query VIEW_1 JOIN TBL2
+    statement.executeQuery("SELECT V1.B FROM VIEW_1 V1 JOIN " + TBL2 + " T2 ON (V1.B = T2.B)");
+
+    // test user can execute query on VIEW_1
+    statement.executeQuery("SELECT A FROM VIEW_1");
+
+    // negative test: test user can't execute query TBL1 JOIN TBL2
+    try {
+      statement.executeQuery("SELECT T1.B FROM " + TBL1 + " T1 JOIN " + TBL2 + " T2 ON (T1.B = T2.B)");
+      Assert.fail("Expected SQL exception");
+    } catch (SQLException e) {
+      context.verifyAuthzException(e);
+    }
+
+    statement.close();
+    connection.close();
+  }
+
+  /*
+   * Admin creates database DB_1, tables TBL1, TBL2 in DB_1, and VIEW_1 on TBL1,
+   * then loads data into TBL1, TBL2. Admin grants SELECT on TBL1 and VIEW_1 to
+   * USER_GROUP, of which user1 is a member.
+   */
+  @Test
+  public void testTableViewJoin4() throws Exception {
+    policyFile
+        .addRolesToGroup(USERGROUP1, "select_tab1", "select_view1")
+        .addPermissionsToRole("select_view1", "server=server1->db=DB_1->table=VIEW_1->action=select")
+        .addPermissionsToRole("select_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=select")
+        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
+    writePolicyFile(policyFile);
+
+    // test execution
+    Connection connection = context.createConnection(USER1_1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("USE " + DB1);
+
+    // test user can execute query VIEW_1 JOIN TBL1
+    statement.executeQuery("SELECT VIEW_1.B FROM VIEW_1 JOIN " + TBL1 + " ON (VIEW_1.B = " + TBL1 + ".B)");
+
+    // negative test: test user can't execute query TBL1 JOIN TBL2
+    try {
+      statement.executeQuery("SELECT " + TBL1 + ".B FROM " + TBL1 + " JOIN " + TBL2 + " ON (" + TBL1 + ".B = " + TBL2 + ".B)");
+      Assert.fail("Expected SQL exception");
+    } catch (SQLException e) {
+      context.verifyAuthzException(e);
+    }
+
+    statement.close();
+    connection.close();
+  }
+}

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtTableScopePart2.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtTableScopePart2.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtTableScopePart2.java
new file mode 100644
index 0000000..3b5f6a6
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtTableScopePart2.java
@@ -0,0 +1,337 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.tests.e2e.hive;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.Statement;
+
+import org.apache.sentry.provider.file.PolicyFile;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.google.common.io.Resources;
+
+/**
+ * The second test class for testing privileges at table level.
+ */
+public class TestPrivilegesAtTableScopePart2 extends AbstractTestWithStaticConfiguration {
+  private static PolicyFile policyFile;
+  private final static String MULTI_TYPE_DATA_FILE_NAME = "emp.dat";
+
+  @Before
+  public void setup() throws Exception {
+    policyFile = super.setupPolicy();
+    super.setup();
+    prepareDBDataForTest();
+  }
+
+  @BeforeClass
+  public static void setupTestStaticConfiguration() throws Exception {
+    AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
+  }
+
+  protected static void prepareDBDataForTest() throws Exception {
+    // copy data file to test dir
+    File dataDir = context.getDataDir();
+    File dataFile = new File(dataDir, MULTI_TYPE_DATA_FILE_NAME);
+    FileOutputStream to = new FileOutputStream(dataFile);
+    Resources.copy(Resources.getResource(MULTI_TYPE_DATA_FILE_NAME), to);
+    to.close();
+
+    // setup db objects needed by the test
+    Connection connection = context.createConnection(ADMIN1);
+    Statement statement = context.createStatement(connection);
+
+    statement.execute("DROP DATABASE IF EXISTS DB_1 CASCADE");
+    statement.execute("CREATE DATABASE DB_1");
+    statement.execute("USE DB_1");
+
+    statement.execute("CREATE TABLE " + TBL1 + "(B INT, A STRING) "
+        + " row format delimited fields terminated by '|'  stored as textfile");
+    statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath() + "' INTO TABLE " + TBL1);
+    statement.execute("CREATE TABLE " + TBL2 + "(B INT, A STRING) "
+        + " row format delimited fields terminated by '|'  stored as textfile");
+    statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath() + "' INTO TABLE " + TBL2);
+    statement.execute("CREATE VIEW VIEW_1 AS SELECT A, B FROM " + TBL1);
+
+    statement.close();
+    connection.close();
+  }
+
+  /***
+   * Verify truncate table permissions for different users with different
+   * privileges
+   * @throws Exception
+   */
+  @Test
+  public void testTruncateTable() throws Exception {
+    File dataDir = context.getDataDir();
+    // copy data file to test dir
+    File dataFile = new File(dataDir, MULTI_TYPE_DATA_FILE_NAME);
+    FileOutputStream to = new FileOutputStream(dataFile);
+    Resources.copy(Resources.getResource(MULTI_TYPE_DATA_FILE_NAME), to);
+    to.close();
+
+    policyFile.setUserGroupMapping(StaticUserGroup.getStaticMapping());
+    writePolicyFile(policyFile);
+
+    // setup db objects needed by the test
+    Connection connection = context.createConnection(ADMIN1);
+    Statement statement = context.createStatement(connection);
+
+    statement.execute("USE " + DB1);
+    statement.execute("DROP TABLE if exists " + TBL1);
+    statement.execute("DROP TABLE if exists " + TBL2);
+    statement.execute("DROP TABLE if exists " + TBL3);
+    statement.execute("CREATE TABLE " + TBL1 + "(B INT, A STRING) "
+        + " row format delimited fields terminated by '|'  stored as textfile");
+    statement.execute("CREATE TABLE " + TBL2 + "(B INT, A STRING) "
+        + " row format delimited fields terminated by '|'  stored as textfile");
+    statement.execute("CREATE TABLE " + TBL3 + "(B INT, A STRING) "
+        + " row format delimited fields terminated by '|'  stored as textfile");
+    statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath()
+        + "' INTO TABLE " + TBL1);
+    statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath()
+        + "' INTO TABLE " + TBL2);
+    statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath()
+        + "' INTO TABLE " + TBL3);
+
+    // verify admin can execute truncate table
+    statement.execute("TRUNCATE TABLE " + TBL1);
+    assertFalse(hasData(statement, TBL1));
+
+    statement.close();
+    connection.close();
+
+    // add roles and grant permissions
+    updatePolicyFile();
+
+    // test truncate table without partitions
+    truncateTableTests(false);
+  }
+
+  /***
+   * Verify truncate partitioned permissions for different users with different
+   * privileges
+   * @throws Exception
+   */
+  @Test
+  public void testTruncatePartitionedTable() throws Exception {
+    File dataDir = context.getDataDir();
+    // copy data file to test dir
+    File dataFile = new File(dataDir, MULTI_TYPE_DATA_FILE_NAME);
+    FileOutputStream to = new FileOutputStream(dataFile);
+    Resources.copy(Resources.getResource(MULTI_TYPE_DATA_FILE_NAME), to);
+    to.close();
+
+    policyFile.setUserGroupMapping(StaticUserGroup.getStaticMapping());
+    writePolicyFile(policyFile);
+
+    // create partitioned tables
+    Connection connection = context.createConnection(ADMIN1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("USE " + DB1);
+    statement.execute("DROP TABLE if exists " + TBL1);
+    statement.execute("CREATE TABLE " + TBL1 + " (i int) PARTITIONED BY (j int)");
+    statement.execute("DROP TABLE if exists " + TBL2);
+    statement.execute("CREATE TABLE " + TBL2 + " (i int) PARTITIONED BY (j int)");
+    statement.execute("DROP TABLE if exists " + TBL3);
+    statement.execute("CREATE TABLE " + TBL3 + " (i int) PARTITIONED BY (j int)");
+
+    // verify admin can execute truncate empty partitioned table
+    statement.execute("TRUNCATE TABLE " + TBL1);
+    assertFalse(hasData(statement, TBL1));
+    statement.close();
+    connection.close();
+
+    // add roles and grant permissions
+    updatePolicyFile();
+
+    // test truncate empty partitioned tables
+    truncateTableTests(false);
+
+    // add partitions to tables
+    connection = context.createConnection(ADMIN1);
+    statement = context.createStatement(connection);
+    statement.execute("USE " + DB1);
+    statement.execute("ALTER TABLE " + TBL1 + " ADD PARTITION (j=1) PARTITION (j=2)");
+    statement.execute("ALTER TABLE " + TBL2 + " ADD PARTITION (j=1) PARTITION (j=2)");
+    statement.execute("ALTER TABLE " + TBL3 + " ADD PARTITION (j=1) PARTITION (j=2)");
+
+    // verify admin can execute truncate NOT empty partitioned table
+    statement.execute("TRUNCATE TABLE " + TBL1 + " partition (j=1)");
+    statement.execute("TRUNCATE TABLE " + TBL1);
+    assertFalse(hasData(statement, TBL1));
+    statement.close();
+    connection.close();
+
+    // test truncate NOT empty partitioned tables
+    truncateTableTests(true);
+  }
+
+  /**
+   * Test queries without a FROM clause. Hive rewrites such queries against dummy database and
+   * table entities, which should not trip the authorization check.
+   * @throws Exception
+   */
+  @Test
+  public void testSelectWithoutFrom() throws Exception {
+    policyFile
+        .addRolesToGroup(USERGROUP1, "all_tab1")
+        .addPermissionsToRole("all_tab1",
+            "server=server1->db=" + DB1 + "->table=" + TBL1)
+        .addRolesToGroup(USERGROUP2, "select_tab1")
+        .addPermissionsToRole("select_tab1",
+            "server=server1->db=" + DB1 + "->table=" + TBL1)
+        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
+    writePolicyFile(policyFile);
+
+    Connection connection = context.createConnection(USER1_1);
+    Statement statement = context.createStatement(connection);
+
+    // test with implicit default database
+    assertTrue(statement.executeQuery("SELECT 1 ").next());
+    assertTrue(statement.executeQuery("SELECT current_database()").next());
+
+    // test after switching database
+    statement.execute("USE " + DB1);
+    assertTrue(statement.executeQuery("SELECT 1 ").next());
+    assertTrue(statement.executeQuery("SELECT current_database() ").next());
+    statement.close();
+    connection.close();
+  }
+
+  // return true if the given table contains at least one row
+  private boolean hasData(Statement stmt, String tableName) throws Exception {
+    ResultSet rs1 = stmt.executeQuery("SELECT * FROM " + tableName);
+    boolean hasResults = rs1.next();
+    rs1.close();
+    return hasResults;
+  }
+
+  @Test
+  public void testDummyPartition() throws Exception {
+
+    policyFile.setUserGroupMapping(StaticUserGroup.getStaticMapping());
+    writePolicyFile(policyFile);
+
+    // setup db objects needed by the test
+    Connection connection = context.createConnection(ADMIN1);
+    Statement statement = context.createStatement(connection);
+
+    statement.execute("USE " + DB1);
+
+    statement.execute("DROP TABLE if exists " + TBL1);
+    statement.execute("CREATE table " + TBL1 + " (a int) PARTITIONED BY (b string, c string)");
+    statement.execute("DROP TABLE if exists " + TBL3);
+    statement.execute("CREATE table " + TBL3 + " (a2 int) PARTITIONED BY (b2 string, c2 string)");
+    statement.close();
+    connection.close();
+
+    policyFile
+        .addRolesToGroup(USERGROUP1, "select_tab1", "select_tab2")
+        .addPermissionsToRole("select_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=select")
+        .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL3 + "->action=insert");
+    writePolicyFile(policyFile);
+
+    connection = context.createConnection(USER1_1);
+    statement = context.createStatement(connection);
+
+    statement.execute("USE " + DB1);
+    statement.execute("INSERT OVERWRITE TABLE " + TBL3 + " PARTITION(b2='abc', c2) select a, b as c2 from " + TBL1);
+    statement.close();
+    connection.close();
+
+  }
+
+  /**
+   * update policy file for truncate table tests
+   */
+  private void updatePolicyFile() throws Exception{
+    policyFile
+        .addRolesToGroup(USERGROUP1, "all_tab1")
+        .addPermissionsToRole("all_tab1",
+            "server=server1->db=" + DB1 + "->table=" + TBL2)
+        .addRolesToGroup(USERGROUP2, "drop_tab1")
+        .addPermissionsToRole("drop_tab1",
+            "server=server1->db=" + DB1 + "->table=" + TBL3 + "->action=drop",
+            "server=server1->db=" + DB1 + "->table=" + TBL3 + "->action=select")
+        .addRolesToGroup(USERGROUP3, "select_tab1")
+        .addPermissionsToRole("select_tab1",
+            "server=server1->db=" + DB1 + "->table=" + TBL1 + "->action=select");
+    writePolicyFile(policyFile);
+  }
+
+  /**
+   * Test truncate table with or without partitions for users with different privileges.
+   * Only test truncate table partition if truncPartition is true.
+   */
+  private void truncateTableTests(boolean truncPartition) throws Exception{
+    Connection connection = null;
+    Statement statement = null;
+    try {
+      connection = context.createConnection(USER1_1);
+      statement = context.createStatement(connection);
+      statement.execute("USE " + DB1);
+      // verify all privileges on table can truncate table
+      if (truncPartition) {
+        statement.execute("TRUNCATE TABLE " + TBL2 + " PARTITION (j=1)");
+      }
+      statement.execute("TRUNCATE TABLE " + TBL2);
+      assertFalse(hasData(statement, TBL2));
+      statement.close();
+      connection.close();
+
+      connection = context.createConnection(USER2_1);
+      statement = context.createStatement(connection);
+      statement.execute("USE " + DB1);
+      // verify drop privilege on table can truncate table
+      if (truncPartition) {
+        statement.execute("TRUNCATE TABLE " + TBL3 + " partition (j=1)");
+      }
+      statement.execute("TRUNCATE TABLE " + TBL3);
+      assertFalse(hasData(statement, TBL3));
+      statement.close();
+      connection.close();
+
+      connection = context.createConnection(USER3_1);
+      statement = context.createStatement(connection);
+      statement.execute("USE " + DB1);
+      // verify select privilege on table can NOT truncate table
+      if (truncPartition) {
+        context.assertAuthzException(
+            statement, "TRUNCATE TABLE " + TBL1 + " PARTITION (j=1)");
+      }
+      context.assertAuthzException(statement, "TRUNCATE TABLE " + TBL1);
+    } finally {
+      if (statement != null) {
+        statement.close();
+      }
+      if (connection != null) {
+        connection.close();
+      }
+    }
+  }
+}


[2/3] sentry git commit: SENTRY-1216: disable sentry ha tests for now; add a timeout for each test class/method; fix transient JUnit timeout issue. (Anne Yu, reviewed by HaoHao).

Posted by an...@apache.org.
http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperationsPart1.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperationsPart1.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperationsPart1.java
new file mode 100644
index 0000000..b474484
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperationsPart1.java
@@ -0,0 +1,566 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.tests.e2e.hive;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.sentry.provider.file.PolicyFile;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.common.io.Resources;
+
+/**
+ * Test operations that require privileges on the server, the database, or a table alone
+ * (part 1). The individual test methods below list the HiveOperations they cover, e.g.
+ * CREATEDATABASE, CREATETABLE, DROPDATABASE, ALTERDATABASE, DESCDATABASE, QUERY and the
+ * ALTERTABLE_* family.
+ */
+public class TestOperationsPart1 extends AbstractTestWithStaticConfiguration {
+
+  private PolicyFile policyFile;
+  final String tableName = "tb1";
+
+  static Map<String, String> privileges = new HashMap<String, String>();
+  static {
+    privileges.put("all_server", "server=server1->action=all");
+    privileges.put("create_server", "server=server1->action=create");
+    privileges.put("all_db1", "server=server1->db=" + DB1 + "->action=all");
+    privileges.put("select_db1", "server=server1->db=" + DB1 + "->action=select");
+    privileges.put("insert_db1", "server=server1->db=" + DB1 + "->action=insert");
+    privileges.put("create_db1", "server=server1->db=" + DB1 + "->action=create");
+    privileges.put("drop_db1", "server=server1->db=" + DB1 + "->action=drop");
+    privileges.put("alter_db1", "server=server1->db=" + DB1 + "->action=alter");
+    privileges.put("create_db2", "server=server1->db=" + DB2 + "->action=create");
+
+    privileges.put("all_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=all");
+    privileges.put("select_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=select");
+    privileges.put("insert_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=insert");
+    privileges.put("alter_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=alter");
+    privileges.put("alter_db1_ptab", "server=server1->db=" + DB1 + "->table=ptab->action=alter");
+    privileges.put("index_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=index");
+    privileges.put("lock_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=lock");
+    privileges.put("drop_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=drop");
+    privileges.put("insert_db2_tb2", "server=server1->db=" + DB2 + "->table=tb2->action=insert");
+    privileges.put("select_db1_view1", "server=server1->db=" + DB1 + "->table=view1->action=select");
+
+  }
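+  // Note (illustrative, not part of the original test): each value above uses Sentry's
+  // policy privilege string syntax, roughly
+  //   server=<server>[->db=<db>[->table=<table>]][->uri=<uri>][->action=<all|select|insert|...>]
+  // and writePolicyFile() serializes the role/group assignments built from these strings into
+  // an ini-style policy file with sections along the lines of:
+  //   [groups]
+  //   usergroup1 = select_db1_tb1
+  //   [roles]
+  //   select_db1_tb1 = server=server1->db=db1->table=tb1->action=select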
+
+  @Before
+  public void setup() throws Exception{
+    policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP)
+        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
+    writePolicyFile(policyFile);
+  }
+
+  private void adminCreate(String db, String table) throws Exception{
+    adminCreate(db, table, false);
+  }
+
+  private void adminCreate(String db, String table, boolean partitioned) throws Exception{
+    Connection connection = context.createConnection(ADMIN1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("DROP DATABASE IF EXISTS " + db + " CASCADE");
+    statement.execute("CREATE DATABASE " + db);
+    if(table !=null) {
+      if (partitioned) {
+        statement.execute("CREATE table  " + db + "." + table + " (a string) PARTITIONED BY (b string)");
+      } else{
+        statement.execute("CREATE table  " + db + "." + table + " (a string)");
+      }
+
+    }
+    statement.close();
+    connection.close();
+  }
+
+  private void adminCreatePartition() throws Exception{
+    Connection connection = context.createConnection(ADMIN1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("USE " + DB1);
+    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '1') ");
+    statement.close();
+    connection.close();
+  }
+
+  /* Test all operations that require create on Server
+  1. Create database : HiveOperation.CREATEDATABASE
+   */
+  @Test
+  public void testCreateOnServer() throws Exception{
+    policyFile
+        .addPermissionsToRole("create_server", privileges.get("create_server"))
+        .addRolesToGroup(USERGROUP1, "create_server");
+
+    writePolicyFile(policyFile);
+
+    Connection connection = context.createConnection(USER1_1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("Create database " + DB2);
+    statement.close();
+    connection.close();
+
+    //Negative case
+    policyFile
+        .addPermissionsToRole("create_db1", privileges.get("create_db1"))
+        .addRolesToGroup(USERGROUP2, "create_db1");
+    writePolicyFile(policyFile);
+
+    connection = context.createConnection(USER2_1);
+    statement = context.createStatement(connection);
+    context.assertSentrySemanticException(statement, "CREATE database " + DB1, semanticException);
+    statement.close();
+    connection.close();
+
+  }
+
+  @Test
+  public void testInsertInto() throws Exception{
+    File dataFile;
+    dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME);
+    FileOutputStream to = new FileOutputStream(dataFile);
+    Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to);
+    to.close();
+
+    adminCreate(DB1, null);
+    policyFile
+        .addPermissionsToRole("all_db1", privileges.get("all_db1"))
+        .addPermissionsToRole("all_uri", "server=server1->uri=file://" + dataDir)
+        .addRolesToGroup(USERGROUP1, "all_db1", "all_uri");
+
+
+    writePolicyFile(policyFile);
+
+    Connection connection = context.createConnection(USER1_1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    statement.execute("create table bar (key int)");
+    statement.execute("load data local inpath '" + dataFile.getPath() + "' into table bar");
+    statement.execute("create table foo (key int) partitioned by (part int) stored as parquet");
+    statement.execute("insert into table foo PARTITION(part=1) select key from bar");
+
+    statement.close();
+    connection.close();
+  }
+
+  /* Test all operations that require create on Database alone
+  1. Create table : HiveOperation.CREATETABLE
+  */
+  @Test
+  public void testCreateOnDatabase() throws Exception{
+    adminCreate(DB1, null);
+    policyFile
+        .addPermissionsToRole("create_db1", privileges.get("create_db1"))
+        .addPermissionsToRole("all_db1", privileges.get("all_db1"))
+        .addRolesToGroup(USERGROUP1, "create_db1")
+        .addRolesToGroup(USERGROUP2, "all_db1");
+
+    writePolicyFile(policyFile);
+
+    Connection connection = context.createConnection(USER1_1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("CREATE TABLE " + DB1 + ".tb2(a int)");
+    statement.close();
+    connection.close();
+
+    connection = context.createConnection(USER2_1);
+    statement = context.createStatement(connection);
+    statement.execute("CREATE TABLE " + DB1 + ".tb3(a int)");
+
+    statement.close();
+    connection.close();
+
+    //Negative case
+    policyFile
+        .addPermissionsToRole("all_db1_tb1", privileges.get("select_db1"))
+        .addRolesToGroup(USERGROUP3, "all_db1_tb1");
+    writePolicyFile(policyFile);
+
+    connection = context.createConnection(USER3_1);
+    statement = context.createStatement(connection);
+    context.assertSentrySemanticException(statement, "CREATE TABLE " + DB1 + ".tb1(a int)", semanticException);
+    statement.close();
+    connection.close();
+  }
+
+  /* Test all operations that require drop on Database alone
+  1. Drop database : HiveOperation.DROPDATABASE
+  */
+  @Test
+  public void testDropOnDatabase() throws Exception{
+    adminCreate(DB1, null);
+    policyFile
+        .addPermissionsToRole("drop_db1", privileges.get("drop_db1"))
+        .addRolesToGroup(USERGROUP1, "drop_db1");
+
+    writePolicyFile(policyFile);
+
+    Connection connection = context.createConnection(USER1_1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("DROP DATABASE " + DB1);
+    statement.close();
+    connection.close();
+
+    adminCreate(DB1, null);
+
+    policyFile
+        .addPermissionsToRole("all_db1", privileges.get("all_db1"))
+        .addRolesToGroup(USERGROUP2, "all_db1");
+    writePolicyFile(policyFile);
+
+    connection = context.createConnection(USER2_1);
+    statement = context.createStatement(connection);
+    statement.execute("DROP DATABASE " + DB1);
+
+    statement.close();
+    connection.close();
+
+    //Negative case
+    adminCreate(DB1, null);
+    policyFile
+        .addPermissionsToRole("select_db1", privileges.get("select_db1"))
+        .addRolesToGroup(USERGROUP3, "select_db1");
+    writePolicyFile(policyFile);
+
+    connection = context.createConnection(USER3_1);
+    statement = context.createStatement(connection);
+    context.assertSentrySemanticException(statement, "drop database " + DB1, semanticException);
+    statement.close();
+    connection.close();
+  }
+
+  /* Test all operations that require alter on Database alone
+  1. Alter database : HiveOperation.ALTERDATABASE
+   */
+  @Test
+  public void testAlterOnDatabase() throws Exception{
+    adminCreate(DB1, null);
+    policyFile
+        .addPermissionsToRole("alter_db1", privileges.get("alter_db1"))
+        .addPermissionsToRole("all_db1", privileges.get("all_db1"))
+        .addRolesToGroup(USERGROUP2, "all_db1")
+        .addRolesToGroup(USERGROUP1, "alter_db1");
+    writePolicyFile(policyFile);
+
+    Connection connection = context.createConnection(USER1_1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("ALTER DATABASE " + DB1 + " SET DBPROPERTIES ('comment'='comment')");
+
+    connection = context.createConnection(USER2_1);
+    statement = context.createStatement(connection);
+    statement.execute("ALTER DATABASE " + DB1 + " SET DBPROPERTIES ('comment'='comment')");
+    statement.close();
+    connection.close();
+
+    //Negative case
+    adminCreate(DB1, null);
+    policyFile
+        .addPermissionsToRole("select_db1", privileges.get("select_db1"))
+        .addRolesToGroup(USERGROUP3, "select_db1");
+    writePolicyFile(policyFile);
+
+    connection = context.createConnection(USER3_1);
+    statement = context.createStatement(connection);
+    context.assertSentrySemanticException(statement, "ALTER DATABASE " + DB1 + " SET DBPROPERTIES ('comment'='comment')", semanticException);
+    statement.close();
+    connection.close();
+  }
+
+  /* SELECT/INSERT on DATABASE
+   1. HiveOperation.DESCDATABASE
+   */
+  @Test
+  public void testDescDB() throws Exception {
+    adminCreate(DB1, tableName);
+    policyFile
+        .addPermissionsToRole("select_db1", privileges.get("select_db1"))
+        .addPermissionsToRole("insert_db1", privileges.get("insert_db1"))
+        .addRolesToGroup(USERGROUP1, "select_db1")
+        .addRolesToGroup(USERGROUP2, "insert_db1");
+    writePolicyFile(policyFile);
+
+    Connection connection = context.createConnection(USER1_1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("describe database " + DB1);
+    statement.close();
+    connection.close();
+
+    connection = context.createConnection(USER2_1);
+    statement = context.createStatement(connection);
+    statement.execute("describe database " + DB1);
+    statement.close();
+    connection.close();
+
+    //Negative case
+    policyFile
+        .addPermissionsToRole("all_db1_tb1", privileges.get("all_db1_tb1"))
+        .addRolesToGroup(USERGROUP3, "all_db1_tb1");
+    writePolicyFile(policyFile);
+    connection = context.createConnection(USER3_1);
+    statement = context.createStatement(connection);
+    context.assertSentrySemanticException(statement, "describe database " + DB1, semanticException);
+    statement.close();
+    connection.close();
+
+  }
+
+  private void assertSemanticException(Statement stmt, String command) throws SQLException{
+    context.assertSentrySemanticException(stmt, command, semanticException);
+  }
+
+  /*
+  1. Analyze table (HiveOperation.QUERY) : select + insert on table
+   */
+  @Test
+  public void testSelectAndInsertOnTable() throws Exception {
+    adminCreate(DB1, tableName, true);
+    adminCreatePartition();
+    policyFile
+        .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
+        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
+        .addRolesToGroup(USERGROUP1, "select_db1_tb1", "insert_db1_tb1");
+    writePolicyFile(policyFile);
+
+    Connection connection = context.createConnection(USER1_1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    statement.execute("ANALYZE TABLE tb1 PARTITION (b='1' ) COMPUTE STATISTICS");
+    statement.close();
+    connection.close();
+  }
+
+  /* Operations which require select on table alone
+  1. HiveOperation.QUERY
+  2. HiveOperation.SHOW_TBLPROPERTIES
+  3. HiveOperation.SHOW_CREATETABLE
+  4. HiveOperation.SHOWINDEXES
+  5. HiveOperation.SHOWCOLUMNS
+  6. Describe tb1 : HiveOperation.DESCTABLE
+  7. HiveOperation.SHOWPARTITIONS
+  8. TODO: show functions?
+  9. HiveOperation.SHOW_TABLESTATUS
+   */
+  @Test
+  public void testSelectOnTable() throws Exception {
+    adminCreate(DB1, tableName, true);
+    adminCreatePartition();
+    policyFile
+        .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
+        .addRolesToGroup(USERGROUP1, "select_db1_tb1");
+    writePolicyFile(policyFile);
+
+    Connection connection = context.createConnection(USER1_1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    statement.execute("select * from tb1");
+
+    statement.executeQuery("SHOW Partitions tb1");
+    statement.executeQuery("SHOW TBLPROPERTIES tb1");
+    statement.executeQuery("SHOW CREATE TABLE tb1");
+    statement.executeQuery("SHOW indexes on tb1");
+    statement.executeQuery("SHOW COLUMNS from tb1");
+    statement.executeQuery("SHOW functions '.*'");
+    statement.executeQuery("SHOW TABLE EXTENDED IN " + DB1 + " LIKE 'tb*'");
+
+    statement.executeQuery("DESCRIBE tb1");
+    statement.executeQuery("DESCRIBE tb1 PARTITION (b=1)");
+
+    statement.close();
+    connection.close();
+
+    //Negative case
+    adminCreate(DB2, tableName);
+    policyFile
+        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
+        .addRolesToGroup(USERGROUP3, "insert_db1_tb1");
+    writePolicyFile(policyFile);
+    connection = context.createConnection(USER3_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    context.assertSentrySemanticException(statement, "select * from tb1", semanticException);
+    context.assertSentrySemanticException(statement,
+        "SHOW TABLE EXTENDED IN " + DB2 + " LIKE 'tb*'", semanticException);
+
+    statement.close();
+    connection.close();
+
+
+  }
+
+  /* Operations which require insert on table alone
+  1. HiveOperation.SHOW_TBLPROPERTIES
+  2. HiveOperation.SHOW_CREATETABLE
+  3. HiveOperation.SHOWINDEXES
+  4. HiveOperation.SHOWCOLUMNS
+  5. HiveOperation.DESCTABLE
+  6. HiveOperation.SHOWPARTITIONS
+  7. TODO: show functions?
+  8. TODO: lock, unlock, Show locks
+  9. HiveOperation.SHOW_TABLESTATUS
+   */
+  @Test
+  public void testInsertOnTable() throws Exception {
+    adminCreate(DB1, tableName, true);
+    adminCreatePartition();
+    policyFile
+        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
+        .addRolesToGroup(USERGROUP1, "insert_db1_tb1");
+    writePolicyFile(policyFile);
+
+    Connection connection = context.createConnection(USER1_1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    /*statement.execute("LOCK TABLE tb1 EXCLUSIVE");
+    statement.execute("UNLOCK TABLE tb1");
+    */
+    statement.executeQuery("SHOW TBLPROPERTIES tb1");
+    statement.executeQuery("SHOW CREATE TABLE tb1");
+    statement.executeQuery("SHOW indexes on tb1");
+    statement.executeQuery("SHOW COLUMNS from tb1");
+    statement.executeQuery("SHOW functions '.*'");
+    //statement.executeQuery("SHOW LOCKS tb1");
+    statement.executeQuery("SHOW TABLE EXTENDED IN " + DB1 + " LIKE 'tb*'");
+
+    //NoViableAltException
+    //statement.executeQuery("SHOW transactions");
+    //statement.executeQuery("SHOW compactions");
+    statement.executeQuery("DESCRIBE tb1");
+    statement.executeQuery("DESCRIBE tb1 PARTITION (b=1)");
+    statement.executeQuery("SHOW Partitions tb1");
+
+
+    statement.close();
+    connection.close();
+  }
+
+  /* Test all operations that require alter on table
+  1. HiveOperation.ALTERTABLE_PROPERTIES
+  2. HiveOperation.ALTERTABLE_SERDEPROPERTIES
+  3. HiveOperation.ALTERTABLE_CLUSTER_SORT
+  4. HiveOperation.ALTERTABLE_TOUCH
+  5. HiveOperation.ALTERTABLE_PROTECTMODE
+  6. HiveOperation.ALTERTABLE_FILEFORMAT
+  7. HiveOperation.ALTERTABLE_RENAMEPART
+  8. HiveOperation.ALTERPARTITION_SERDEPROPERTIES
+  9. TODO: archive partition
+  10. TODO: unarchive partition
+  11. HiveOperation.ALTERPARTITION_FILEFORMAT
+  12. TODO: partition touch (is it the same as HiveOperation.ALTERTABLE_TOUCH?)
+  13. HiveOperation.ALTERPARTITION_PROTECTMODE
+  14. HiveOperation.ALTERTABLE_RENAMECOL
+  15. HiveOperation.ALTERTABLE_ADDCOLS
+  16. HiveOperation.ALTERTABLE_REPLACECOLS
+  17. TODO: HiveOperation.ALTERVIEW_PROPERTIES
+  18. TODO: HiveOperation.ALTERTABLE_SERIALIZER
+  19. TODO: HiveOperation.ALTERPARTITION_SERIALIZER
+   */
+  @Test
+  public void testAlterTable() throws Exception {
+    adminCreate(DB1, tableName, true);
+
+    Connection connection;
+    Statement statement;
+    //Setup
+    connection = context.createConnection(ADMIN1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '10') ");
+    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '1') ");
+    statement.execute("DROP TABLE IF EXISTS ptab");
+    statement.execute("CREATE TABLE ptab (a int) STORED AS PARQUET");
+
+    policyFile
+      .addPermissionsToRole("alter_db1_tb1", privileges.get("alter_db1_tb1"))
+      .addPermissionsToRole("alter_db1_ptab", privileges.get("alter_db1_ptab"))
+      .addRolesToGroup(USERGROUP1, "alter_db1_tb1", "alter_db1_ptab")
+      .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
+      .addRolesToGroup(USERGROUP2, "insert_db1_tb1");
+    writePolicyFile(policyFile);
+
+    //Negative test cases
+    connection = context.createConnection(USER2_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    assertSemanticException(statement, "ALTER TABLE tb1 SET TBLPROPERTIES ('comment' = 'new_comment')");
+    assertSemanticException(statement, "ALTER TABLE tb1 SET SERDEPROPERTIES ('field.delim' = ',')");
+    assertSemanticException(statement, "ALTER TABLE tb1 CLUSTERED BY (a) SORTED BY (a) INTO 1 BUCKETS");
+    assertSemanticException(statement, "ALTER TABLE tb1 TOUCH");
+    assertSemanticException(statement, "ALTER TABLE tb1 ENABLE NO_DROP");
+    assertSemanticException(statement, "ALTER TABLE tb1 DISABLE OFFLINE");
+    assertSemanticException(statement, "ALTER TABLE tb1 SET FILEFORMAT RCFILE");
+
+    assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) RENAME TO PARTITION (b = 2)");
+    assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) SET SERDEPROPERTIES ('field.delim' = ',')");
+    //assertSemanticException(statement, "ALTER TABLE tb1 ARCHIVE PARTITION (b = 2)");
+    //assertSemanticException(statement, "ALTER TABLE tb1 UNARCHIVE PARTITION (b = 2)");
+    assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) SET FILEFORMAT RCFILE");
+    assertSemanticException(statement, "ALTER TABLE tb1 TOUCH PARTITION (b = 10)");
+    assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) DISABLE NO_DROP");
+    assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) DISABLE OFFLINE");
+
+    assertSemanticException(statement, "ALTER TABLE tb1 CHANGE COLUMN a c int");
+    assertSemanticException(statement, "ALTER TABLE tb1 ADD COLUMNS (a int)");
+    assertSemanticException(statement, "ALTER TABLE ptab REPLACE COLUMNS (a int, c int)");
+    assertSemanticException(statement, "MSCK REPAIR TABLE tb1");
+
+    //assertSemanticException(statement, "ALTER VIEW view1 SET TBLPROPERTIES ('comment' = 'new_comment')");
+
+
+    statement.close();
+    connection.close();
+
+    //Positive cases
+    connection = context.createConnection(USER1_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    statement.execute("ALTER TABLE tb1 SET TBLPROPERTIES ('comment' = 'new_comment')");
+    statement.execute("ALTER TABLE tb1 SET SERDEPROPERTIES ('field.delim' = ',')");
+    statement.execute("ALTER TABLE tb1 CLUSTERED BY (a) SORTED BY (a) INTO 1 BUCKETS");
+    statement.execute("ALTER TABLE tb1 TOUCH");
+    statement.execute("ALTER TABLE tb1 ENABLE NO_DROP");
+    statement.execute("ALTER TABLE tb1 DISABLE OFFLINE");
+    statement.execute("ALTER TABLE tb1 SET FILEFORMAT RCFILE");
+
+    statement.execute("ALTER TABLE tb1 PARTITION (b = 1) RENAME TO PARTITION (b = 2)");
+    statement.execute("ALTER TABLE tb1 PARTITION (b = 2) SET SERDEPROPERTIES ('field.delim' = ',')");
+    //statement.execute("ALTER TABLE tb1 ARCHIVE PARTITION (b = 2)");
+    //statement.execute("ALTER TABLE tb1 UNARCHIVE PARTITION (b = 2)");
+    statement.execute("ALTER TABLE tb1 PARTITION (b = 2) SET FILEFORMAT RCFILE");
+    statement.execute("ALTER TABLE tb1 TOUCH PARTITION (b = 2)");
+    statement.execute("ALTER TABLE tb1 PARTITION (b = 2) DISABLE NO_DROP");
+    statement.execute("ALTER TABLE tb1 PARTITION (b = 2) DISABLE OFFLINE");
+
+    statement.execute("ALTER TABLE tb1 CHANGE COLUMN a c int");
+    statement.execute("ALTER TABLE tb1 ADD COLUMNS (a int)");
+    statement.execute("ALTER TABLE ptab REPLACE COLUMNS (a int, c int)");
+    statement.execute("MSCK REPAIR TABLE tb1");
+
+    //statement.execute("ALTER VIEW view1 SET TBLPROPERTIES ('comment' = 'new_comment')");
+
+    statement.close();
+    connection.close();
+  }
+}

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperationsPart2.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperationsPart2.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperationsPart2.java
new file mode 100644
index 0000000..8eb2851
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperationsPart2.java
@@ -0,0 +1,663 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.tests.e2e.hive;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.sentry.provider.file.PolicyFile;
+import static org.junit.Assert.assertTrue;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import com.google.common.io.Resources;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Test all operations that require index on table alone (part 2)
+ * 1. Create index : HiveOperation.CREATEINDEX
+ * 2. Drop index : HiveOperation.DROPINDEX
+ * 3. HiveOperation.ALTERINDEX_REBUILD
+ * 4. TODO: HiveOperation.ALTERINDEX_PROPS
+ */
+public class TestOperationsPart2 extends AbstractTestWithStaticConfiguration {
+  private static final Logger LOGGER = LoggerFactory
+      .getLogger(TestOperationsPart2.class);
+
+  private PolicyFile policyFile;
+  final String tableName = "tb1";
+
+  static Map<String, String> privileges = new HashMap<String, String>();
+  static {
+    privileges.put("all_server", "server=server1->action=all");
+    privileges.put("create_server", "server=server1->action=create");
+    privileges.put("all_db1", "server=server1->db=" + DB1 + "->action=all");
+    privileges.put("select_db1", "server=server1->db=" + DB1 + "->action=select");
+    privileges.put("insert_db1", "server=server1->db=" + DB1 + "->action=insert");
+    privileges.put("create_db1", "server=server1->db=" + DB1 + "->action=create");
+    privileges.put("drop_db1", "server=server1->db=" + DB1 + "->action=drop");
+    privileges.put("alter_db1", "server=server1->db=" + DB1 + "->action=alter");
+    privileges.put("create_db2", "server=server1->db=" + DB2 + "->action=create");
+
+    privileges.put("all_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=all");
+    privileges.put("select_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=select");
+    privileges.put("insert_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=insert");
+    privileges.put("alter_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=alter");
+    privileges.put("alter_db1_ptab", "server=server1->db=" + DB1 + "->table=ptab->action=alter");
+    privileges.put("index_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=index");
+    privileges.put("lock_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=lock");
+    privileges.put("drop_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=drop");
+    privileges.put("insert_db2_tb2", "server=server1->db=" + DB2 + "->table=tb2->action=insert");
+    privileges.put("select_db1_view1", "server=server1->db=" + DB1 + "->table=view1->action=select");
+
+  }
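+  // Note (illustrative only): each value above follows Sentry's privilege
+  // string grammar, server=<server>->db=<db>->table=<table>->action=<action>.
+  // When a test calls addPermissionsToRole()/addRolesToGroup() and then
+  // writePolicyFile(), the file-based provider is handed roughly:
+  //   [groups]
+  //   <group mapped to USERGROUP1> = index_db1_tb1
+  //   [roles]
+  //   index_db1_tb1 = server=server1->db=<DB1>->table=tb1->action=index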
+
+  @Before
+  public void setup() throws Exception{
+    policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP)
+        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
+    writePolicyFile(policyFile);
+  }
+
+  private void adminCreate(String db, String table) throws Exception{
+    adminCreate(db, table, false);
+  }
+
+  private void adminCreate(String db, String table, boolean partitioned) throws Exception{
+    Connection connection = context.createConnection(ADMIN1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("DROP DATABASE IF EXISTS " + db + " CASCADE");
+    statement.execute("CREATE DATABASE " + db);
+    if(table !=null) {
+      if (partitioned) {
+        statement.execute("CREATE table  " + db + "." + table + " (a string) PARTITIONED BY (b string)");
+      } else{
+        statement.execute("CREATE table  " + db + "." + table + " (a string)");
+      }
+
+    }
+    statement.close();
+    connection.close();
+  }
+
+  private void assertSemanticException(Statement stmt, String command) throws SQLException{
+    context.assertSentrySemanticException(stmt, command, semanticException);
+  }
+
+  @Test
+  public void testIndexTable() throws Exception {
+    adminCreate(DB1, tableName, true);
+    policyFile
+        .addPermissionsToRole("index_db1_tb1", privileges.get("index_db1_tb1"))
+        .addRolesToGroup(USERGROUP1, "index_db1_tb1")
+        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
+        .addRolesToGroup(USERGROUP2, "insert_db1_tb1");
+    writePolicyFile(policyFile);
+
+    Connection connection;
+    Statement statement;
+
+    //Positive cases
+    connection = context.createConnection(USER1_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    statement.execute("CREATE INDEX table01_index ON TABLE tb1 (a) AS 'COMPACT' WITH DEFERRED REBUILD");
+    statement.execute("ALTER INDEX table01_index ON tb1 REBUILD");
+    statement.close();
+    connection.close();
+
+    //Negative case
+    connection = context.createConnection(USER2_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    assertSemanticException(statement, "CREATE INDEX table02_index ON TABLE tb1 (a) AS 'COMPACT' WITH DEFERRED REBUILD");
+    assertSemanticException(statement, "ALTER INDEX table01_index ON tb1 REBUILD");
+    assertSemanticException(statement, "DROP INDEX table01_index ON tb1");
+    statement.close();
+    connection.close();
+
+    //Positive cases
+    connection = context.createConnection(USER1_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    statement.execute("DROP INDEX table01_index ON tb1");
+    statement.close();
+    connection.close();
+  }
+
+  /* Test all operations that require drop on table alone
+  1. Drop table : HiveOperation.DROPTABLE
+  */
+  @Test
+  public void testDropTable() throws Exception {
+    adminCreate(DB1, tableName, true);
+    policyFile
+        .addPermissionsToRole("drop_db1_tb1", privileges.get("drop_db1_tb1"))
+        .addRolesToGroup(USERGROUP1, "drop_db1_tb1")
+        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
+        .addRolesToGroup(USERGROUP2, "insert_db1_tb1");
+    writePolicyFile(policyFile);
+
+    Connection connection;
+    Statement statement;
+
+    //Negative case
+    connection = context.createConnection(USER2_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    assertSemanticException(statement, "drop table " + tableName);
+
+    statement.close();
+    connection.close();
+
+    //Positive cases
+    connection = context.createConnection(USER1_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    statement.execute("drop table " + tableName);
+
+    statement.close();
+    connection.close();
+  }
+
+  @Ignore
+  @Test
+  public void testLockTable() throws Exception {
+    //TODO
+  }
+
+  /* Operations that require alter + drop on table
+    1. HiveOperation.ALTERTABLE_DROPPARTS
+  */
+  @Test
+  public void dropPartition() throws Exception {
+    adminCreate(DB1, tableName, true);
+    policyFile
+        .addPermissionsToRole("alter_db1_tb1", privileges.get("alter_db1_tb1"))
+        .addPermissionsToRole("drop_db1_tb1", privileges.get("drop_db1_tb1"))
+        .addRolesToGroup(USERGROUP1, "alter_db1_tb1", "drop_db1_tb1")
+        .addRolesToGroup(USERGROUP2, "alter_db1_tb1");
+
+    writePolicyFile(policyFile);
+
+    Connection connection;
+    Statement statement;
+    //Setup
+    connection = context.createConnection(ADMIN1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '10') ");
+
+    //Negative case
+    connection = context.createConnection(USER2_1);
+    statement = context.createStatement(connection);
+    statement.execute("USE " + DB1);
+    assertSemanticException(statement, "ALTER TABLE tb1 DROP PARTITION (b = 10)");
+
+    //Positive case
+    connection = context.createConnection(USER1_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    statement.execute("ALTER TABLE tb1 DROP PARTITION (b = 10)");
+    statement.close();
+    connection.close();
+  }
+
+  /*
+   1. HiveOperation.ALTERTABLE_RENAME
+   */
+  @Test
+  public void renameTable() throws Exception {
+    adminCreate(DB1, tableName);
+    policyFile
+        .addPermissionsToRole("alter_db1_tb1", privileges.get("alter_db1_tb1"))
+        .addPermissionsToRole("create_db1", privileges.get("create_db1"))
+        .addRolesToGroup(USERGROUP1, "alter_db1_tb1", "create_db1")
+        .addRolesToGroup(USERGROUP2, "create_db1")
+        .addRolesToGroup(USERGROUP3, "alter_db1_tb1");
+
+    writePolicyFile(policyFile);
+
+    Connection connection;
+    Statement statement;
+
+    //Negative cases
+    connection = context.createConnection(USER2_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    assertSemanticException(statement, "ALTER TABLE tb1 RENAME TO tb2");
+    statement.close();
+    connection.close();
+
+    connection = context.createConnection(USER3_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    assertSemanticException(statement, "ALTER TABLE tb1 RENAME TO tb2");
+    statement.close();
+    connection.close();
+
+    //Positive case
+    connection = context.createConnection(USER1_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    statement.execute("ALTER TABLE tb1 RENAME TO tb2");
+    statement.close();
+    connection.close();
+  }
+
+  /* Test all operations which require alter on table (+ all on URI)
+   1. HiveOperation.ALTERTABLE_LOCATION
+   2. HiveOperation.ALTERTABLE_ADDPARTS
+   3. TODO: HiveOperation.ALTERPARTITION_LOCATION
+   4. TODO: HiveOperation.ALTERTBLPART_SKEWED_LOCATION
+   */
+  @Test
+  public void testAlterOnTableAndURI() throws Exception {
+    adminCreate(DB1, tableName, true);
+    String tabLocation = dfs.getBaseDir() + "/" + Math.random();
+    policyFile
+        .addPermissionsToRole("alter_db1_tb1", privileges.get("alter_db1_tb1"))
+        .addPermissionsToRole("all_uri", "server=server1->uri=" + tabLocation)
+        .addRolesToGroup(USERGROUP1, "alter_db1_tb1", "all_uri")
+        .addRolesToGroup(USERGROUP2, "alter_db1_tb1");
+
+    writePolicyFile(policyFile);
+
+    //Case without uri privilege
+    Connection connection = context.createConnection(USER2_1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("USE " + DB1);
+    assertSemanticException(statement, "ALTER TABLE tb1 SET LOCATION '" + tabLocation + "'");
+    assertSemanticException(statement, "ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '3') LOCATION '" + tabLocation + "/part'");
+    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '1') ");
+
+    connection = context.createConnection(USER1_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    statement.execute("ALTER TABLE tb1 SET LOCATION '" + tabLocation + "'");
+    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '3') LOCATION '" + tabLocation + "/part'");
+    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '10') ");
+    statement.close();
+    connection.close();
+
+    //Negative case: User2_1 has privileges on table but not on uri
+    connection = context.createConnection(USER2_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    context.assertSentrySemanticException(statement, "ALTER TABLE tb1 SET LOCATION '" + tabLocation + "'",
+        semanticException);
+    context.assertSentrySemanticException(statement,
+        "ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '3') LOCATION '" + tabLocation + "/part'",
+        semanticException);
+    statement.close();
+    connection.close();
+
+    //Negative case: User3_1 has only insert privileges on table
+    policyFile
+        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
+        .addRolesToGroup(USERGROUP3, "insert_db1_tb1", "all_uri");
+    writePolicyFile(policyFile);
+
+    connection = context.createConnection(USER3_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    assertSemanticException(statement, "ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '2') ");
+    assertSemanticException(statement, "ALTER TABLE tb1 SET LOCATION '" + tabLocation + "'");
+
+    assertSemanticException(statement, "ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '3') LOCATION '"
+        + tabLocation + "/part'");
+    statement.close();
+    connection.close();
+
+
+  }
+
+  /* Create on Database and select on table
+  1. Create view :  HiveOperation.CREATEVIEW
+   */
+  @Test
+  public void testCreateView() throws Exception {
+    adminCreate(DB1, tableName);
+    adminCreate(DB2, null);
+    policyFile
+        .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
+        .addPermissionsToRole("create_db2", privileges.get("create_db2"))
+        .addRolesToGroup(USERGROUP1, "select_db1_tb1", "create_db2");
+    writePolicyFile(policyFile);
+
+    Connection connection = context.createConnection(USER1_1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("use " + DB2);
+    statement.execute("create view view1 as select a from " + DB1 + ".tb1");
+    statement.close();
+    connection.close();
+
+    //Negative case
+    policyFile
+        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
+        .addRolesToGroup(USERGROUP3, "insert_db1_tb1", "create_db2");
+    writePolicyFile(policyFile);
+
+    connection = context.createConnection(USER3_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB2);
+    context.assertSentrySemanticException(statement, "create view view1 as select a from " + DB1 + ".tb1",
+        semanticException);
+    statement.close();
+    connection.close();
+
+
+  }
+
+  /*
+   1. HiveOperation.IMPORT : Create on db + all on URI
+   2. HiveOperation.EXPORT : SELECT on table + all on uri
+   */
+
+  @Test
+  public void testExportImport() throws Exception {
+    File dataFile;
+    dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME);
+    FileOutputStream to = new FileOutputStream(dataFile);
+    Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to);
+    to.close();
+
+    dropDb(ADMIN1, DB1);
+    createDb(ADMIN1, DB1);
+    createTable(ADMIN1, DB1, dataFile, tableName);
+    String location = dfs.getBaseDir() + "/" + Math.random();
+    policyFile
+        .addPermissionsToRole("create_db1", privileges.get("create_db1"))
+        .addPermissionsToRole("all_uri", "server=server1->uri="+ location)
+        .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
+        .addPermissionsToRole("insert_db1", privileges.get("insert_db1"))
+        .addRolesToGroup(USERGROUP1, "select_db1_tb1", "all_uri")
+        .addRolesToGroup(USERGROUP2, "create_db1", "all_uri")
+        .addRolesToGroup(USERGROUP3, "insert_db1", "all_uri");
+    writePolicyFile(policyFile);
+    Connection connection;
+    Statement statement;
+
+    //Negative case
+    connection = context.createConnection(USER3_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    context.assertSentrySemanticException(statement, "export table tb1 to '" + location + "'",
+        semanticException);
+    statement.close();
+    connection.close();
+
+    //Positive
+    connection = context.createConnection(USER1_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    statement.execute("export table tb1 to '" + location + "'" );
+    statement.close();
+    connection.close();
+
+    //Negative
+    connection = context.createConnection(USER3_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    context.assertSentrySemanticException(statement, "import table tb2 from '" + location + "'",
+        semanticException);
+    statement.close();
+    connection.close();
+
+    //Positive
+    connection = context.createConnection(USER2_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    statement.execute("import table tb2 from '" + location + "'");
+    statement.close();
+    connection.close();
+
+  }
+
+  /*
+  1. HiveOperation.LOAD: INSERT on table + all on uri
+   */
+  @Test
+  public void testLoad() throws Exception {
+    File dataFile;
+    dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME);
+    FileOutputStream to = new FileOutputStream(dataFile);
+    Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to);
+    to.close();
+
+    adminCreate(DB1, tableName);
+
+    policyFile
+        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
+        .addPermissionsToRole("all_uri", "server=server1->uri=file://" + dataDir)
+        .addRolesToGroup(USERGROUP1, "insert_db1_tb1", "all_uri");
+    writePolicyFile(policyFile);
+
+    Connection connection = context.createConnection(USER1_1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    statement.execute("load data local inpath '" + dataFile.getPath() + "' into table tb1" );
+    statement.close();
+    connection.close();
+  }
+
+  /*
+  1. HiveOperation.CREATETABLE_AS_SELECT : Create on db + select on table
+   */
+  @Test
+  public void testCTAS() throws Exception {
+    adminCreate(DB1, tableName);
+    adminCreate(DB2, null);
+
+    String location = dfs.getBaseDir() + "/" + Math.random();
+
+    Connection connection = context.createConnection(ADMIN1);
+    Statement statement = context.createStatement(connection);
+    statement.execute("Use " + DB1);
+    statement.execute("create view view1 as select a from " + DB1 + ".tb1");
+    statement.close();
+    connection.close();
+
+    policyFile
+        .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
+        .addPermissionsToRole("select_db1_view1", privileges.get("select_db1_view1"))
+        .addPermissionsToRole("create_db2", privileges.get("create_db2"))
+        .addPermissionsToRole("all_uri", "server=server1->uri=" + location)
+        .addRolesToGroup(USERGROUP1, "select_db1_tb1", "create_db2")
+        .addRolesToGroup(USERGROUP2, "select_db1_view1", "create_db2")
+        .addRolesToGroup(USERGROUP3, "select_db1_tb1", "create_db2,all_uri");
+    writePolicyFile(policyFile);
+
+    connection = context.createConnection(USER1_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB2);
+    statement.execute("create table tb2 as select a from " + DB1 + ".tb1");
+    //Ensure CTAS fails without URI
+    context.assertSentrySemanticException(statement, "create table tb3 location '" + location +
+            "' as select a from " + DB1 + ".tb1",
+        semanticException);
+    context.assertSentrySemanticException(statement, "create table tb3 as select a from " + DB1 + ".view1",
+        semanticException);
+
+
+    statement.close();
+    connection.close();
+
+    connection = context.createConnection(USER2_1);
+    statement = context.createStatement(connection);
+    statement.execute("Use " + DB2);
+    statement.execute("create table tb3 as select a from " + DB1 + ".view1" );
+    context.assertSentrySemanticException(statement, "create table tb4 as select a from " + DB1 + ".tb1",
+        semanticException);
+
+    statement.close();
+    connection.close();
+
+    connection = context.createConnection(USER3_1);
+    statement = context.createStatement(connection);
+    //CTAS is valid with URI
+    statement.execute("Use " + DB2);
+    statement.execute("create table tb4 location '" + location +
+        "' as select a from " + DB1 + ".tb1");
+
+    statement.close();
+    connection.close();
+
+  }
+
+
+  /*
+  1. INSERT : input: select on source table; output: insert on target table + all on uri (optional, for INSERT OVERWRITE DIRECTORY)
+   */
+  @Test
+  public void testInsert() throws Exception {
+    File dataFile;
+    dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME);
+    FileOutputStream to = new FileOutputStream(dataFile);
+    Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to);
+    to.close();
+
+    dropDb(ADMIN1, DB1);
+    dropDb(ADMIN1, DB2);
+    createDb(ADMIN1, DB1);
+    createDb(ADMIN1, DB2);
+    createTable(ADMIN1, DB1, dataFile, tableName);
+    createTable(ADMIN1, DB2, null, "tb2");
+    String location = dfs.getBaseDir() + "/" + Math.random();
+
+    policyFile
+        .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
+        .addPermissionsToRole("insert_db2_tb2", privileges.get("insert_db2_tb2"))
+        .addRolesToGroup(USERGROUP1, "select_db1_tb1", "insert_db2_tb2")
+        .addPermissionsToRole("all_uri", "server=server1->uri=" + location)
+        .addRolesToGroup(USERGROUP2, "select_db1_tb1", "all_uri");
+    writePolicyFile(policyFile);
+
+    Connection connection = context.createConnection(USER1_1);
+    Statement statement = context.createStatement(connection);
+    assertSemanticException(statement, "insert overwrite directory '" + location + "' select * from " + DB1 + ".tb1");
+    statement.execute("insert overwrite table " + DB2 + ".tb2 select * from " + DB1 + ".tb1");
+    statement.close();
+    connection.close();
+
+    connection = context.createConnection(USER2_1);
+    statement = context.createStatement(connection);
+    statement.execute("insert overwrite directory '" + location + "' select * from " + DB1 + ".tb1" );
+    assertSemanticException(statement, "insert overwrite table " + DB2 + ".tb2 select * from " + DB1 + ".tb1");
+    statement.close();
+    connection.close();
+  }
+
+  @Test
+  public void testFullyQualifiedTableName() throws Exception{
+    Connection connection;
+    Statement statement;
+    connection = context.createConnection(ADMIN1);
+    statement = context.createStatement(connection);
+    statement.execute("create database " + DB1);
+    statement.execute("create table " + DB1 + ".tb1(a int)");
+    statement.execute("DROP table " + DB1 + ".tb1");
+    statement.execute("create table " + DB1 + ".tb1(a int)");
+    statement.execute("use " + DB1);
+    statement.execute("drop table tb1");
+  }
+
+  @Test
+  public void testExternalTables() throws Exception{
+    createDb(ADMIN1, DB1);
+    File externalTblDir = new File(dataDir, "exttab");
+    assertTrue("Unable to create directory for external table test" , externalTblDir.mkdir());
+
+    policyFile
+        .addPermissionsToRole("create_db1", privileges.get("create_db1"))
+        .addPermissionsToRole("all_uri", "server=server1->uri=file://" + dataDir.getPath())
+        .addRolesToGroup(USERGROUP1, "create_db1", "all_uri")
+        .addRolesToGroup(USERGROUP2, "create_db1");
+    writePolicyFile(policyFile);
+
+    Connection connection = context.createConnection(USER2_1);
+    Statement statement = context.createStatement(connection);
+    assertSemanticException(statement, "create external table " + DB1 + ".tb1(a int) stored as " +
+        "textfile location 'file:" + externalTblDir.getAbsolutePath() + "'");
+    //Create external table on HDFS
+    assertSemanticException(statement, "create external table " + DB1 + ".tb2(a int) location '/user/hive/warehouse/blah'");
+    statement.close();
+    connection.close();
+
+    connection = context.createConnection(USER1_1);
+    statement = context.createStatement(connection);
+    statement.execute("create external table " + DB1 + ".tb1(a int) stored as " +
+        "textfile location 'file:" + externalTblDir.getAbsolutePath() + "'");
+    statement.close();
+    connection.close();
+
+
+  }
+
+  @Test
+  public void testCaseSensitivity() throws Exception {
+    Statement statement = null;
+    Connection connection = null;
+    try {
+      createDb(ADMIN1, DB1);
+      String scratchLikeDir = context.getProperty(HiveConf.ConfVars.SCRATCHDIR.varname);
+      LOGGER.info("scratch like dir = " + scratchLikeDir);
+      String extParentDir = scratchLikeDir + "/ABC/hhh";
+      String extTableDir = scratchLikeDir + "/abc/hhh";
+      LOGGER.info("Creating extParentDir = " + extParentDir + ", extTableDir = " + extTableDir);
+      dfs.assertCreateDir(extParentDir);
+      dfs.assertCreateDir(extTableDir);
+
+      if (! (extParentDir.toLowerCase().startsWith("hdfs://")
+          || extParentDir.toLowerCase().startsWith("s3://")
+          || extParentDir.contains("://"))) {
+        String scheme = fileSystem.getUri().toString();
+        LOGGER.info("scheme = " + scheme);
+        extParentDir = scheme + extParentDir;
+        extTableDir = scheme + extTableDir;
+        LOGGER.info("Add scheme in extParentDir = " + extParentDir + ", extTableDir = " + extTableDir);
+      }
+
+      policyFile
+          .addPermissionsToRole("all_db1", privileges.get("all_db1"))
+          .addPermissionsToRole("all_uri", "server=server1->uri=" + extParentDir)
+          .addRolesToGroup(USERGROUP1, "all_db1", "all_uri");
+      writePolicyFile(policyFile);
+      connection = context.createConnection(USER1_1);
+      statement = context.createStatement(connection);
+      assertSemanticException(statement,
+          "create external table " + DB1 + ".tb1(a int) location '" + extTableDir + "'");
+    } finally {
+      if (statement != null) {
+        statement.close();
+      }
+      if (connection != null) {
+        connection.close();
+      }
+    }
+  }
+}
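
For context, the same kind of negative check can be reproduced by hand against a Sentry-enabled HiveServer2 over plain JDBC. The sketch below is illustrative only: host, port, database, user, and table names are placeholders, and it assumes the Hive JDBC driver is on the classpath.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class ManualAuthzProbe {
      public static void main(String[] args) throws Exception {
        // Placeholder connection details for a Sentry-enabled HiveServer2.
        String url = "jdbc:hive2://localhost:10000/default";
        try (Connection conn = DriverManager.getConnection(url, "user1_1", "");
             Statement stmt = conn.createStatement()) {
          stmt.execute("USE db_1");
          // With only INSERT granted on tb1, Sentry is expected to reject this
          // ALTER, which surfaces to the client as a SQLException.
          stmt.execute("ALTER TABLE tb1 SET TBLPROPERTIES ('comment' = 'x')");
        }
      }
    }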

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtTableScope.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtTableScope.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtTableScope.java
deleted file mode 100644
index 4c1cd8e..0000000
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtTableScope.java
+++ /dev/null
@@ -1,662 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.tests.e2e.hive;
-
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-
-import org.junit.Assert;
-
-import org.apache.sentry.provider.file.PolicyFile;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-import com.google.common.io.Resources;
-
-/* Tests privileges at table scope within a single database.
- */
-
-public class TestPrivilegesAtTableScope extends AbstractTestWithStaticConfiguration {
-
-  private static PolicyFile policyFile;
-  private final static String MULTI_TYPE_DATA_FILE_NAME = "emp.dat";
-
-  @Before
-  public void setup() throws Exception {
-    policyFile = super.setupPolicy();
-    super.setup();
-    prepareDBDataForTest();
-  }
-
-  @BeforeClass
-  public static void setupTestStaticConfiguration() throws Exception {
-    AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
-  }
-
-  protected static void prepareDBDataForTest() throws Exception {
-    // copy data file to test dir
-    File dataDir = context.getDataDir();
-    File dataFile = new File(dataDir, MULTI_TYPE_DATA_FILE_NAME);
-    FileOutputStream to = new FileOutputStream(dataFile);
-    Resources.copy(Resources.getResource(MULTI_TYPE_DATA_FILE_NAME), to);
-    to.close();
-
-    // setup db objects needed by the test
-    Connection connection = context.createConnection(ADMIN1);
-    Statement statement = context.createStatement(connection);
-
-    statement.execute("DROP DATABASE IF EXISTS DB_1 CASCADE");
-    statement.execute("CREATE DATABASE DB_1");
-    statement.execute("USE DB_1");
-
-    statement.execute("CREATE TABLE " + TBL1 + "(B INT, A STRING) "
-        + " row format delimited fields terminated by '|'  stored as textfile");
-    statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath() + "' INTO TABLE " + TBL1);
-    statement.execute("CREATE TABLE " + TBL2 + "(B INT, A STRING) "
-        + " row format delimited fields terminated by '|'  stored as textfile");
-    statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath() + "' INTO TABLE " + TBL2);
-    statement.execute("CREATE VIEW VIEW_1 AS SELECT A, B FROM " + TBL1);
-
-    statement.close();
-    connection.close();
-  }
-
-  /*
-   * Admin creates database DB_1, table TBL1, TBL2 in DB_1, loads data into
-   * TBL1, TBL2 Admin grants SELECT on TBL1, TBL2, INSERT on TBL1 to
-   * USER_GROUP of which user1 is a member.
-   */
-  @Test
-  public void testInsertAndSelect() throws Exception {
-    policyFile
-        .addRolesToGroup(USERGROUP1, "select_tab1", "insert_tab1", "select_tab2")
-        .addPermissionsToRole("select_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=select")
-        .addPermissionsToRole("insert_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=insert")
-        .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL2 + "->action=select")
-        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
-    writePolicyFile(policyFile);
-
-    // test execution
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("USE DB_1");
-    // test user can insert
-    statement.execute("INSERT INTO TABLE " + TBL1 + " SELECT A, B FROM " + TBL2);
-    // test user can query table
-    statement.executeQuery("SELECT A FROM " + TBL2);
-    // negative test: test user can't drop
-    try {
-      statement.execute("DROP TABLE " + TBL1);
-      Assert.fail("Expected SQL exception");
-    } catch (SQLException e) {
-      context.verifyAuthzException(e);
-    }
-    statement.close();
-    connection.close();
-
-    // connect as admin and drop TBL1
-    connection = context.createConnection(ADMIN1);
-    statement = context.createStatement(connection);
-    statement.execute("USE DB_1");
-    statement.execute("DROP TABLE " + TBL1);
-    statement.close();
-    connection.close();
-
-    // negative test: connect as user1 and try to recreate TBL1
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("USE DB_1");
-    try {
-      statement.execute("CREATE TABLE " + TBL1 + "(A STRING)");
-      Assert.fail("Expected SQL exception");
-    } catch (SQLException e) {
-      context.verifyAuthzException(e);
-    }
-
-    statement.close();
-    connection.close();
-
-    // connect as admin to restore the TBL1
-    connection = context.createConnection(ADMIN1);
-    statement = context.createStatement(connection);
-    statement.execute("USE DB_1");
-    statement.execute("CREATE TABLE " + TBL1 + "(B INT, A STRING) "
-        + " row format delimited fields terminated by '|'  stored as textfile");
-    statement.execute("INSERT INTO TABLE " + TBL1 + " SELECT A, B FROM " + TBL2);
-    statement.close();
-    connection.close();
-
-  }
-
-  /*
-   * Admin creates database DB_1, table TBL1, TBL2 in DB_1, loads data into
-   * TBL1, TBL2. Admin grants INSERT on TBL1, SELECT on TBL2 to USER_GROUP
-   * of which user1 is a member.
-   */
-  @Test
-  public void testInsert() throws Exception {
-    policyFile
-        .addRolesToGroup(USERGROUP1, "insert_tab1", "select_tab2")
-        .addPermissionsToRole("insert_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=insert")
-        .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL2 + "->action=select")
-        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
-    writePolicyFile(policyFile);
-
-    // test execution
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("USE " + DB1);
-    // test user can execute insert on table
-    statement.execute("INSERT INTO TABLE " + TBL1 + " SELECT A, B FROM " + TBL2);
-
-    // negative test: user can't query table
-    try {
-      statement.executeQuery("SELECT A FROM " + TBL1);
-      Assert.fail("Expected SQL exception");
-    } catch (SQLException e) {
-      context.verifyAuthzException(e);
-    }
-
-    // negative test: test user can't query view
-    try {
-      statement.executeQuery("SELECT A FROM VIEW_1");
-      Assert.fail("Expected SQL exception");
-    } catch (SQLException e) {
-      context.verifyAuthzException(e);
-    }
-
-    // negative test case: show tables shouldn't list VIEW_1
-    ResultSet resultSet = statement.executeQuery("SHOW TABLES");
-    while (resultSet.next()) {
-      String tableName = resultSet.getString(1);
-      assertNotNull("table name is null in result set", tableName);
-      assertFalse("Found VIEW_1 in the result set",
-          "VIEW_1".equalsIgnoreCase(tableName));
-    }
-
-    // negative test: test user can't create a new view
-    try {
-      statement.executeQuery("CREATE VIEW VIEW_2(A) AS SELECT A FROM " + TBL1);
-      Assert.fail("Expected SQL Exception");
-    } catch (SQLException e) {
-      context.verifyAuthzException(e);
-    }
-    statement.close();
-    connection.close();
-  }
-
-  /*
-   * Admin creates database DB_1, table TBL1, TBL2 in DB_1, loads data into
-   * TBL1, TBL2. Admin grants SELECT on TBL1, TBL2 to USER_GROUP of which
-   * user1 is a member.
-   */
-  @Test
-  public void testSelect() throws Exception {
-    policyFile
-        .addRolesToGroup(USERGROUP1, "select_tab1", "select_tab2")
-        .addPermissionsToRole("select_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=select")
-        .addPermissionsToRole("insert_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=insert")
-        .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL2 + "->action=select")
-        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
-    writePolicyFile(policyFile);
-
-    // test execution
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("USE " + DB1);
-    // test user can execute query on table
-    statement.executeQuery("SELECT A FROM " + TBL1);
-
-    // negative test: test insert into table
-    try {
-      statement.executeQuery("INSERT INTO TABLE " + TBL1 + " SELECT A, B FROM " + TBL2);
-      Assert.fail("Expected SQL exception");
-    } catch (SQLException e) {
-      context.verifyAuthzException(e);
-    }
-
-    // negative test: test user can't query view
-    try {
-      statement.executeQuery("SELECT A FROM VIEW_1");
-      Assert.fail("Expected SQL exception");
-    } catch (SQLException e) {
-      context.verifyAuthzException(e);
-    }
-
-    // negative test: test user can't create a new view
-    try {
-      statement.executeQuery("CREATE VIEW VIEW_2(A) AS SELECT A FROM " + TBL1);
-      Assert.fail("Expected SQL Exception");
-    } catch (SQLException e) {
-      context.verifyAuthzException(e);
-    }
-    statement.close();
-    connection.close();
-  }
-
-  /*
-   * Admin creates database DB_1, table TBL1, TBL2 in DB_1, VIEW_1 on TBL1
-   * loads data into TBL1, TBL2. Admin grants SELECT on TBL1,TBL2 to
-   * USER_GROUP of which user1 is a member.
-   */
-  @Test
-  public void testTableViewJoin() throws Exception {
-    policyFile
-        .addRolesToGroup(USERGROUP1, "select_tab1", "select_tab2")
-        .addPermissionsToRole("select_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=select")
-        .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL2 + "->action=select")
-        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
-    writePolicyFile(policyFile);
-
-    // test execution
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("USE " + DB1);
-    // test user can execute query TBL1 JOIN TBL2
-    statement.executeQuery("SELECT T1.B FROM " + TBL1 + " T1 JOIN " + TBL2 + " T2 ON (T1.B = T2.B)");
-
-    // negative test: test user can't execute query VIEW_1 JOIN TBL2
-    try {
-      statement.executeQuery("SELECT V1.B FROM VIEW_1 V1 JOIN " + TBL2 + " T2 ON (V1.B = T2.B)");
-      Assert.fail("Expected SQL exception");
-    } catch (SQLException e) {
-      context.verifyAuthzException(e);
-    }
-
-    statement.close();
-    connection.close();
-  }
-
-  /*
-   * Admin creates database DB_1, table TBL1, TBL2 in DB_1, VIEW_1 on TBL1
-   * loads data into TBL1, TBL2. Admin grants SELECT on TBL2 to USER_GROUP of
-   * which user1 is a member.
-   */
-  @Test
-  public void testTableViewJoin2() throws Exception {
-    policyFile
-        .addRolesToGroup(USERGROUP1, "select_tab2")
-        .addPermissionsToRole("select_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=select")
-        .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL2 + "->action=select")
-        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
-    writePolicyFile(policyFile);
-
-    // test execution
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("USE " + DB1);
-    // test user can execute query on TBL2
-    statement.executeQuery("SELECT A FROM " + TBL2);
-
-    // negative test: test user can't execute query VIEW_1 JOIN TBL2
-    try {
-      statement.executeQuery("SELECT VIEW_1.B FROM VIEW_1 JOIN " + TBL2 + " ON (VIEW_1.B = " + TBL2 + ".B)");
-      Assert.fail("Expected SQL exception");
-    } catch (SQLException e) {
-      context.verifyAuthzException(e);
-    }
-
-    // negative test: test user can't execute query TBL1 JOIN TBL2
-    try {
-      statement.executeQuery("SELECT " + TBL1 + ".B FROM " + TBL1 + " JOIN " + TBL2 + " ON (" + TBL1 + ".B = " + TBL2 + ".B)");
-      Assert.fail("Expected SQL exception");
-    } catch (SQLException e) {
-      context.verifyAuthzException(e);
-    }
-
-    statement.close();
-    connection.close();
-  }
-
-  /*
-   * Admin creates database DB_1, table TBL1, TBL2 in DB_1, VIEW_1 on TBL1
-   * loads data into TBL1, TBL2. Admin grants SELECT on TBL2, VIEW_1 to
-   * USER_GROUP of which user1 is a member.
-   */
-  @Test
-  public void testTableViewJoin3() throws Exception {
-    policyFile
-        .addRolesToGroup(USERGROUP1, "select_tab2", "select_view1")
-        .addPermissionsToRole("select_view1", "server=server1->db=DB_1->table=VIEW_1->action=select")
-        .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL2 + "->action=select")
-        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
-    writePolicyFile(policyFile);
-
-    // test execution
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("USE " + DB1);
-    // test user can execute query on TBL2
-    statement.executeQuery("SELECT A FROM " + TBL2);
-
-    // test user can execute query VIEW_1 JOIN TBL2
-    statement.executeQuery("SELECT V1.B FROM VIEW_1 V1 JOIN " + TBL2 + " T2 ON (V1.B = T2.B)");
-
-    // test user can execute query on VIEW_1
-    statement.executeQuery("SELECT A FROM VIEW_1");
-
-    // negative test: test user can't execute query TBL1 JOIN TBL2
-    try {
-      statement.executeQuery("SELECT T1.B FROM " + TBL1 + " T1 JOIN " + TBL2 + " T2 ON (T1.B = T2.B)");
-      Assert.fail("Expected SQL exception");
-    } catch (SQLException e) {
-      context.verifyAuthzException(e);
-    }
-
-    statement.close();
-    connection.close();
-  }
-
-  /*
-   * Admin creates database DB_1, table TBL1, TBL2 in DB_1, VIEW_1 on TBL1
-   * loads data into TBL1, TBL2. Admin grants SELECT on TBL1, VIEW_1 to
-   * USER_GROUP of which user1 is a member.
-   */
-  @Test
-  public void testTableViewJoin4() throws Exception {
-    policyFile
-        .addRolesToGroup(USERGROUP1, "select_tab1", "select_view1")
-        .addPermissionsToRole("select_view1", "server=server1->db=DB_1->table=VIEW_1->action=select")
-        .addPermissionsToRole("select_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=select")
-        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
-    writePolicyFile(policyFile);
-
-    // test execution
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("USE " + DB1);
-
-    // test user can execute query VIEW_1 JOIN TBL1
-    statement.executeQuery("SELECT VIEW_1.B FROM VIEW_1 JOIN " + TBL1 + " ON (VIEW_1.B = " + TBL1 + ".B)");
-
-    // negative test: test user can't execute query TBL1 JOIN TBL2
-    try {
-      statement.executeQuery("SELECT " + TBL1 + ".B FROM " + TBL1 + " JOIN " + TBL2 + " ON (" + TBL1 + ".B = " + TBL2 + ".B)");
-      Assert.fail("Expected SQL exception");
-    } catch (SQLException e) {
-      context.verifyAuthzException(e);
-    }
-
-    statement.close();
-    connection.close();
-  }
-
-  /***
-   * Verify truncate table permissions for different users with different
-   * privileges
-   * @throws Exception
-   */
-  @Test
-  public void testTruncateTable() throws Exception {
-    File dataDir = context.getDataDir();
-    // copy data file to test dir
-    File dataFile = new File(dataDir, MULTI_TYPE_DATA_FILE_NAME);
-    FileOutputStream to = new FileOutputStream(dataFile);
-    Resources.copy(Resources.getResource(MULTI_TYPE_DATA_FILE_NAME), to);
-    to.close();
-
-    policyFile.setUserGroupMapping(StaticUserGroup.getStaticMapping());
-    writePolicyFile(policyFile);
-
-    // setup db objects needed by the test
-    Connection connection = context.createConnection(ADMIN1);
-    Statement statement = context.createStatement(connection);
-
-    statement.execute("USE " + DB1);
-    statement.execute("DROP TABLE if exists " + TBL1);
-    statement.execute("DROP TABLE if exists " + TBL2);
-    statement.execute("DROP TABLE if exists " + TBL3);
-    statement.execute("CREATE TABLE " + TBL1 + "(B INT, A STRING) "
-        + " row format delimited fields terminated by '|'  stored as textfile");
-    statement.execute("CREATE TABLE " + TBL2 + "(B INT, A STRING) "
-        + " row format delimited fields terminated by '|'  stored as textfile");
-    statement.execute("CREATE TABLE " + TBL3 + "(B INT, A STRING) "
-        + " row format delimited fields terminated by '|'  stored as textfile");
-    statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath()
-        + "' INTO TABLE " + TBL1);
-    statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath()
-        + "' INTO TABLE " + TBL2);
-    statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath()
-        + "' INTO TABLE " + TBL3);
-
-    // verify admin can execute truncate table
-    statement.execute("TRUNCATE TABLE " + TBL1);
-    assertFalse(hasData(statement, TBL1));
-
-    statement.close();
-    connection.close();
-
-    // add roles and grant permissions
-    updatePolicyFile();
-
-    // test truncate table without partitions
-    truncateTableTests(false);
-  }
-
-  /***
-   * Verify truncate partitioned permissions for different users with different
-   * privileges
-   * @throws Exception
-   */
-  @Test
-  public void testTruncatePartitionedTable() throws Exception {
-    File dataDir = context.getDataDir();
-    // copy data file to test dir
-    File dataFile = new File(dataDir, MULTI_TYPE_DATA_FILE_NAME);
-    FileOutputStream to = new FileOutputStream(dataFile);
-    Resources.copy(Resources.getResource(MULTI_TYPE_DATA_FILE_NAME), to);
-    to.close();
-
-    policyFile.setUserGroupMapping(StaticUserGroup.getStaticMapping());
-    writePolicyFile(policyFile);
-
-    // create partitioned tables
-    Connection connection = context.createConnection(ADMIN1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("USE " + DB1);
-    statement.execute("DROP TABLE if exists " + TBL1);
-    statement.execute("CREATE TABLE " + TBL1 + " (i int) PARTITIONED BY (j int)");
-    statement.execute("DROP TABLE if exists " + TBL2);
-    statement.execute("CREATE TABLE " + TBL2 + " (i int) PARTITIONED BY (j int)");
-    statement.execute("DROP TABLE if exists " + TBL3);
-    statement.execute("CREATE TABLE " + TBL3 + " (i int) PARTITIONED BY (j int)");
-
-    // verify admin can execute truncate empty partitioned table
-    statement.execute("TRUNCATE TABLE " + TBL1);
-    assertFalse(hasData(statement, TBL1));
-    statement.close();
-    connection.close();
-
-    // add roles and grant permissions
-    updatePolicyFile();
-
-    // test truncate empty partitioned tables
-    truncateTableTests(false);
-
-    // add partitions to tables
-    connection = context.createConnection(ADMIN1);
-    statement = context.createStatement(connection);
-    statement.execute("USE " + DB1);
-    statement.execute("ALTER TABLE " + TBL1 + " ADD PARTITION (j=1) PARTITION (j=2)");
-    statement.execute("ALTER TABLE " + TBL2 + " ADD PARTITION (j=1) PARTITION (j=2)");
-    statement.execute("ALTER TABLE " + TBL3 + " ADD PARTITION (j=1) PARTITION (j=2)");
-
-    // verify admin can execute truncate NOT empty partitioned table
-    statement.execute("TRUNCATE TABLE " + TBL1 + " partition (j=1)");
-    statement.execute("TRUNCATE TABLE " + TBL1);
-    assertFalse(hasData(statement, TBL1));
-    statement.close();
-    connection.close();
-
-    // test truncate NOT empty partitioned tables
-    truncateTableTests(true);
-  }
-
-  /**
-   * Test queries without from clause. Hive rewrites the queries with dummy db and table
-   * entities which should not trip authorization check.
-   * @throws Exception
-   */
-  @Test
-  public void testSelectWithoutFrom() throws Exception {
-    policyFile
-        .addRolesToGroup(USERGROUP1, "all_tab1")
-        .addPermissionsToRole("all_tab1",
-            "server=server1->db=" + DB1 + "->table=" + TBL1)
-        .addRolesToGroup(USERGROUP2, "select_tab1")
-        .addPermissionsToRole("select_tab1",
-            "server=server1->db=" + DB1 + "->table=" + TBL1)
-        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-
-    // test with implicit default database
-    assertTrue(statement.executeQuery("SELECT 1 ").next());
-    assertTrue(statement.executeQuery("SELECT current_database()").next());
-
-    // test after switching database
-    statement.execute("USE " + DB1);
-    assertTrue(statement.executeQuery("SELECT 1 ").next());
-    assertTrue(statement.executeQuery("SELECT current_database() ").next());
-    statement.close();
-    connection.close();
-  }
-
-  // verify that the given table has data
-  private boolean hasData(Statement stmt, String tableName) throws Exception {
-    ResultSet rs1 = stmt.executeQuery("SELECT * FROM " + tableName);
-    boolean hasResults = rs1.next();
-    rs1.close();
-    return hasResults;
-  }
-
-  @Test
-  public void testDummyPartition() throws Exception {
-
-    policyFile.setUserGroupMapping(StaticUserGroup.getStaticMapping());
-    writePolicyFile(policyFile);
-
-    // setup db objects needed by the test
-    Connection connection = context.createConnection(ADMIN1);
-    Statement statement = context.createStatement(connection);
-
-    statement.execute("USE " + DB1);
-
-    statement.execute("DROP TABLE if exists " + TBL1);
-    statement.execute("CREATE table " + TBL1 + " (a int) PARTITIONED BY (b string, c string)");
-    statement.execute("DROP TABLE if exists " + TBL3);
-    statement.execute("CREATE table " + TBL3 + " (a2 int) PARTITIONED BY (b2 string, c2 string)");
-    statement.close();
-    connection.close();
-
-    policyFile
-        .addRolesToGroup(USERGROUP1, "select_tab1", "select_tab2")
-        .addPermissionsToRole("select_tab1", "server=server1->db=DB_1->table=" + TBL1 + "->action=select")
-        .addPermissionsToRole("select_tab2", "server=server1->db=DB_1->table=" + TBL3 + "->action=insert");
-    writePolicyFile(policyFile);
-
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-
-    statement.execute("USE " + DB1);
-    statement.execute("INSERT OVERWRITE TABLE " + TBL3 + " PARTITION(b2='abc', c2) select a, b as c2 from " + TBL1);
-    statement.close();
-    connection.close();
-
-  }
-
-  /**
-   * update policy file for truncate table tests
-   */
-  private void updatePolicyFile() throws Exception{
-    policyFile
-        .addRolesToGroup(USERGROUP1, "all_tab1")
-        .addPermissionsToRole("all_tab1",
-            "server=server1->db=" + DB1 + "->table=" + TBL2)
-        .addRolesToGroup(USERGROUP2, "drop_tab1")
-        .addPermissionsToRole("drop_tab1",
-            "server=server1->db=" + DB1 + "->table=" + TBL3 + "->action=drop",
-            "server=server1->db=" + DB1 + "->table=" + TBL3 + "->action=select")
-        .addRolesToGroup(USERGROUP3, "select_tab1")
-        .addPermissionsToRole("select_tab1",
-            "server=server1->db=" + DB1 + "->table=" + TBL1 + "->action=select");
-    writePolicyFile(policyFile);
-  }
-
-  /**
-   * Test truncate table with or without partitions for users with different privileges.
-   * Only test truncate table partition if truncPartition is true.
-   */
-  private void truncateTableTests(boolean truncPartition) throws Exception{
-    Connection connection = null;
-    Statement statement = null;
-    try {
-      connection = context.createConnection(USER1_1);
-      statement = context.createStatement(connection);
-      statement.execute("USE " + DB1);
-      // verify all privileges on table can truncate table
-      if (truncPartition) {
-        statement.execute("TRUNCATE TABLE " + TBL2 + " PARTITION (j=1)");
-      }
-      statement.execute("TRUNCATE TABLE " + TBL2);
-      assertFalse(hasData(statement, TBL2));
-      statement.close();
-      connection.close();
-
-      connection = context.createConnection(USER2_1);
-      statement = context.createStatement(connection);
-      statement.execute("USE " + DB1);
-      // verify drop privilege on table can truncate table
-      if (truncPartition) {
-        statement.execute("TRUNCATE TABLE " + TBL3 + " partition (j=1)");
-      }
-      statement.execute("TRUNCATE TABLE " + TBL3);
-      assertFalse(hasData(statement, TBL3));
-      statement.close();
-      connection.close();
-
-      connection = context.createConnection(USER3_1);
-      statement = context.createStatement(connection);
-      statement.execute("USE " + DB1);
-      // verify select privilege on table can NOT truncate table
-      if (truncPartition) {
-        context.assertAuthzException(
-            statement, "TRUNCATE TABLE " + TBL1 + " PARTITION (j=1)");
-      }
-      context.assertAuthzException(statement, "TRUNCATE TABLE " + TBL1);
-    } finally {
-      if (statement != null) {
-        statement.close();
-      }
-      if (connection != null) {
-        connection.close();
-      }
-    }
-  }
-}


[3/3] sentry git commit: SENTRY-1216: disable sentry ha tests for now; add time out for each test class/method; fix transient junit time out issue. (Anne Yu, reviewed by HaoHao).

Posted by an...@apache.org.
SENTRY-1216: disable sentry ha tests for now; add time out for each test class/method; fix transient junit time out issue. (Anne Yu, reviewed by HaoHao).


Project: http://git-wip-us.apache.org/repos/asf/sentry/repo
Commit: http://git-wip-us.apache.org/repos/asf/sentry/commit/66b32afa
Tree: http://git-wip-us.apache.org/repos/asf/sentry/tree/66b32afa
Diff: http://git-wip-us.apache.org/repos/asf/sentry/diff/66b32afa

Branch: refs/heads/master
Commit: 66b32afa87fab816af972d68b253f46c53ec7f58
Parents: a051630
Author: Anne Yu <an...@cloudera.com>
Authored: Mon Apr 25 15:04:09 2016 -0700
Committer: Anne Yu <an...@cloudera.com>
Committed: Mon Apr 25 21:18:27 2016 -0700

----------------------------------------------------------------------
 sentry-tests/sentry-tests-hive/pom.xml          |    6 +-
 .../dbprovider/AbstractTestWithDbProvider.java  |    8 +
 .../tests/e2e/dbprovider/TestDbConnections.java |    4 +
 .../tests/e2e/dbprovider/TestDbOperations.java  |   37 -
 .../e2e/dbprovider/TestDbOperationsPart1.java   |   40 +
 .../e2e/dbprovider/TestDbOperationsPart2.java   |   39 +
 .../TestDbPrivilegesAtTableScope.java           |   39 -
 .../TestDbPrivilegesAtTableScopePart1.java      |   39 +
 .../TestDbPrivilegesAtTableScopePart2.java      |   39 +
 .../tests/e2e/hdfs/TestHDFSIntegration.java     |   12 +-
 .../e2e/hdfs/TestHDFSIntegrationWithHA.java     |    2 +
 .../AbstractTestWithStaticConfiguration.java    |   42 +-
 .../sentry/tests/e2e/hive/TestOperations.java   | 1125 ------------------
 .../tests/e2e/hive/TestOperationsPart1.java     |  566 +++++++++
 .../tests/e2e/hive/TestOperationsPart2.java     |  663 +++++++++++
 .../e2e/hive/TestPrivilegesAtTableScope.java    |  662 -----------
 .../hive/TestPrivilegesAtTableScopePart1.java   |  406 +++++++
 .../hive/TestPrivilegesAtTableScopePart2.java   |  337 ++++++
 18 files changed, 2195 insertions(+), 1871 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/pom.xml b/sentry-tests/sentry-tests-hive/pom.xml
index 9d2ef76..e36b5c0 100644
--- a/sentry-tests/sentry-tests-hive/pom.xml
+++ b/sentry-tests/sentry-tests-hive/pom.xml
@@ -466,13 +466,15 @@ limitations under the License.
           <include>**/TestDbPerDatabasePolicyFile.java</include>
           <include>**/TestDbPrivilegeAtTransform.java</include>
           <include>**/TestDbPrivilegesAtDatabaseScope.java</include>
-          <include>**/TestDbPrivilegesAtTableScope.java</include>
+          <include>**/TestDbPrivilegesAtTableScopePart1.java</include>
+          <include>**/TestDbPrivilegesAtTableScopePart2.java</include>
           <include>**/TestDbSandboxOps.java</include>
           <include>**/TestDbExportImportPrivileges.java</include>
           <include>**/TestDbUriPermissions.java</include>
           <include>**/TestDbRuntimeMetadataRetrieval.java</include>
           <include>**/TestDatabaseProvider.java</include>
-          <include>**/TestDbOperations.java</include>
+          <include>**/TestDbOperationsPart1.java</include>
+          <include>**/TestDbOperationsPart2.java</include>
           <include>**/TestPrivilegeWithGrantOption.java</include>
           <include>**/TestDbPrivilegesAtColumnScope.java</include>
           <include>**/TestColumnEndToEnd.java</include>

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/AbstractTestWithDbProvider.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/AbstractTestWithDbProvider.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/AbstractTestWithDbProvider.java
index 17a2d1e..7d36d73 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/AbstractTestWithDbProvider.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/AbstractTestWithDbProvider.java
@@ -42,6 +42,9 @@ import org.apache.sentry.tests.e2e.hive.StaticUserGroup;
 import org.apache.sentry.tests.e2e.hive.hiveserver.HiveServerFactory;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.rules.Timeout;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
@@ -49,6 +52,11 @@ import com.google.common.io.Files;
 
 public abstract class AbstractTestWithDbProvider extends AbstractTestWithHiveServer {
 
+  @ClassRule
+  public static Timeout classTimeout = new Timeout(600000); // millis; each test class must finish within 600s (10m)
+  @Rule
+  public Timeout timeout = new Timeout(180000); // millis; each test method must finish within 180s (3m)
+
   protected static final String SERVER_HOST = "localhost";
 
   protected static Map<String, String> properties = Maps.newHashMap();
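
The @ClassRule / @Rule pair added above is standard JUnit 4 machinery: the class-level Timeout caps the combined runtime of all tests in a class, while the method-level Timeout caps each individual test method. A minimal, self-contained sketch of the same idea (class and method names below are illustrative, not part of this patch):

    import org.junit.ClassRule;
    import org.junit.Rule;
    import org.junit.Test;
    import org.junit.rules.Timeout;

    public class TimeoutRuleSketch {
      // Shared by the whole class: fail the run if all tests together exceed 10 minutes.
      @ClassRule
      public static Timeout classTimeout = new Timeout(600000);

      // Applied to every test method: fail any single test that exceeds 3 minutes.
      @Rule
      public Timeout methodTimeout = new Timeout(180000);

      @Test
      public void finishesQuickly() throws Exception {
        Thread.sleep(100); // well under both limits
      }
    }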

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbConnections.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbConnections.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbConnections.java
index 2af0536..49fb182 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbConnections.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbConnections.java
@@ -26,6 +26,9 @@ import org.apache.sentry.provider.db.SentryAccessDeniedException;
 import org.apache.sentry.provider.db.SentryAlreadyExistsException;
 import org.apache.sentry.provider.file.PolicyFile;
 import org.apache.sentry.tests.e2e.hive.AbstractTestWithStaticConfiguration;
+
+import static org.junit.Assume.assumeThat;
+import static org.hamcrest.Matchers.is;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -44,6 +47,7 @@ public class TestDbConnections extends AbstractTestWithStaticConfiguration {
     super.setupAdmin();
     super.setup();
     PolicyFile.setAdminOnServer1(ADMINGROUP);
+    assumeThat(getSentrySrv().getNumActiveClients(), is(0L));
   }
 
   /**
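
The assumeThat precondition added above uses JUnit assumptions: when an assumption does not hold, the test is reported as skipped rather than failed, so TestDbConnections quietly bails out if client connections leaked by an earlier run are still open. A minimal sketch of the mechanism (the field is a stand-in for getSentrySrv().getNumActiveClients(); names are illustrative):

    import static org.hamcrest.Matchers.is;
    import static org.junit.Assume.assumeThat;

    import org.junit.Before;
    import org.junit.Test;

    public class AssumptionSketch {
      private long activeClients = 0L; // stand-in for the real active-client count

      @Before
      public void checkPrecondition() {
        // A failed assumption marks the test as skipped, not failed.
        assumeThat(activeClients, is(0L));
      }

      @Test
      public void runsOnlyWhenNoClientsAreLeaked() {
        // body runs only when the assumption held
      }
    }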

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperations.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperations.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperations.java
deleted file mode 100644
index 3fab344..0000000
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperations.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.tests.e2e.dbprovider;
-
-import org.apache.sentry.tests.e2e.hive.AbstractTestWithStaticConfiguration;
-import org.apache.sentry.tests.e2e.hive.TestOperations;
-import org.junit.Before;
-import org.junit.BeforeClass;
-
-public class TestDbOperations extends TestOperations{
-  @Override
-  @Before
-  public void setup() throws Exception {
-    super.setupAdmin();
-    super.setup();
-  }
-  @BeforeClass
-  public static void setupTestStaticConfiguration() throws Exception {
-    useSentryService = true;
-    AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperationsPart1.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperationsPart1.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperationsPart1.java
new file mode 100644
index 0000000..4ccf270
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperationsPart1.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.tests.e2e.dbprovider;
+
+import org.apache.sentry.tests.e2e.hive.AbstractTestWithStaticConfiguration;
+import org.apache.sentry.tests.e2e.hive.TestOperationsPart1;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+/**
+ * Test db operations part 1
+ */
+public class TestDbOperationsPart1 extends TestOperationsPart1 {
+  @Override
+  @Before
+  public void setup() throws Exception {
+    super.setupAdmin();
+    super.setup();
+  }
+  @BeforeClass
+  public static void setupTestStaticConfiguration() throws Exception {
+    useSentryService = true;
+    AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperationsPart2.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperationsPart2.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperationsPart2.java
new file mode 100644
index 0000000..d57d669
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbOperationsPart2.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.tests.e2e.dbprovider;
+
+import org.apache.sentry.tests.e2e.hive.AbstractTestWithStaticConfiguration;
+import org.apache.sentry.tests.e2e.hive.TestOperationsPart2;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+/**
+ * Test db operations part 2
+ */
+public class TestDbOperationsPart2 extends TestOperationsPart2 {
+  @Override
+  @Before
+  public void setup() throws Exception {
+    super.setupAdmin();
+    super.setup();
+  }
+  @BeforeClass
+  public static void setupTestStaticConfiguration() throws Exception {
+    useSentryService = true;
+    AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
+  }
+}

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScope.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScope.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScope.java
deleted file mode 100644
index a4f07df..0000000
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScope.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.tests.e2e.dbprovider;
-
-import org.apache.sentry.tests.e2e.hive.AbstractTestWithStaticConfiguration;
-import org.apache.sentry.tests.e2e.hive.TestPrivilegesAtTableScope;
-import org.junit.Before;
-import org.junit.BeforeClass;
-
-public class TestDbPrivilegesAtTableScope extends TestPrivilegesAtTableScope {
-  @Override
-  @Before
-  public void setup() throws Exception {
-    super.setupAdmin();
-    super.setup();
-    prepareDBDataForTest();
-  }
-  @BeforeClass
-  public static void setupTestStaticConfiguration() throws Exception {
-    useSentryService = true;
-    AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScopePart1.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScopePart1.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScopePart1.java
new file mode 100644
index 0000000..9bb476c
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScopePart1.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.tests.e2e.dbprovider;
+
+import org.apache.sentry.tests.e2e.hive.AbstractTestWithStaticConfiguration;
+import org.apache.sentry.tests.e2e.hive.TestPrivilegesAtTableScopePart1;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+public class TestDbPrivilegesAtTableScopePart1 extends TestPrivilegesAtTableScopePart1 {
+  @Override
+  @Before
+  public void setup() throws Exception {
+    super.setupAdmin();
+    super.setup();
+    prepareDBDataForTest();
+  }
+  @BeforeClass
+  public static void setupTestStaticConfiguration() throws Exception {
+    useSentryService = true;
+    AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScopePart2.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScopePart2.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScopePart2.java
new file mode 100644
index 0000000..e21d344
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegesAtTableScopePart2.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.tests.e2e.dbprovider;
+
+import org.apache.sentry.tests.e2e.hive.AbstractTestWithStaticConfiguration;
+import org.apache.sentry.tests.e2e.hive.TestPrivilegesAtTableScopePart2;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+public class TestDbPrivilegesAtTableScopePart2 extends TestPrivilegesAtTableScopePart2 {
+  @Override
+  @Before
+  public void setup() throws Exception {
+    super.setupAdmin();
+    super.setup();
+    prepareDBDataForTest();
+  }
+  @BeforeClass
+  public static void setupTestStaticConfiguration() throws Exception {
+    useSentryService = true;
+    AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegration.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegration.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegration.java
index 99242d7..c3a5379 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegration.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegration.java
@@ -40,8 +40,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import com.google.common.base.Preconditions;
 
 import org.apache.sentry.core.common.utils.PathUtils;
-import org.junit.Assert;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
@@ -83,11 +81,17 @@ import org.apache.sentry.tests.e2e.hive.hiveserver.InternalMetastoreServer;
 import org.apache.sentry.tests.e2e.minisentry.SentrySrv;
 import org.apache.sentry.tests.e2e.minisentry.SentrySrvFactory;
 import org.fest.reflect.core.Reflection;
+
+import org.junit.Assert;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Ignore;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.Timeout;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -101,6 +105,10 @@ public class TestHDFSIntegration {
   private static final Logger LOGGER = LoggerFactory
       .getLogger(TestHDFSIntegration.class);
 
+  @ClassRule
+  public static Timeout classTimeout = new Timeout(600000); // millis; each test class must finish within 600s (10m)
+  @Rule
+  public Timeout timeout = new Timeout(180000); // millis; each test method must finish within 180s (3m)
 
   public static class WordCountMapper extends MapReduceBase implements
       Mapper<LongWritable, Text, String, Long> {

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationWithHA.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationWithHA.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationWithHA.java
index 92c0693..be6d082 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationWithHA.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationWithHA.java
@@ -18,7 +18,9 @@
 package org.apache.sentry.tests.e2e.hdfs;
 
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 
+@Ignore("Disable Sentry HA tests for now")
 public class TestHDFSIntegrationWithHA extends TestHDFSIntegration {
   @BeforeClass
   public static void setup() throws Exception {
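
Class-level @Ignore is what takes the whole HA suite out of the run while keeping the code in the tree: JUnit reports every test in an ignored class as skipped and surfaces the reason string in the report. A minimal sketch (illustrative names):

    import org.junit.Ignore;
    import org.junit.Test;

    // Every test in an @Ignore'd class is skipped, with the reason shown in the test report.
    @Ignore("Disabled until the underlying issue is resolved")
    public class IgnoredSuiteSketch {

      @Test
      public void neverRuns() {
        throw new AssertionError("JUnit does not execute this while the class is ignored");
      }
    }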

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java
index cb5039b..d2a1d36 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java
@@ -35,7 +35,18 @@ import java.util.Map;
 import java.util.HashSet;
 
 import com.google.common.collect.Sets;
+import org.junit.After;
 import org.junit.Assert;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.rules.RuleChain;
+import org.junit.rules.Timeout;
+import org.junit.rules.TestRule;
+import org.junit.rules.TestWatcher;
+import org.junit.runner.Description;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -65,10 +76,7 @@ import org.apache.sentry.tests.e2e.minisentry.SentrySrvFactory;
 import org.apache.sentry.tests.e2e.minisentry.SentrySrvFactory.SentrySrvType;
 import org.apache.sentry.tests.e2e.minisentry.SentrySrv;
 import org.apache.tools.ant.util.StringUtils;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -82,6 +90,32 @@ import javax.security.auth.login.LoginContext;
 public abstract class AbstractTestWithStaticConfiguration {
   private static final Logger LOGGER = LoggerFactory
       .getLogger(AbstractTestWithStaticConfiguration.class);
+
+  @ClassRule
+  public final static TestRule timeoutClass = RuleChain
+      .outerRule(new TestWatcher() {
+        @Override
+        protected void failed(Throwable e, Description description) {
+          LOGGER.error("Test failed: " + e);
+          if (e.getMessage() != null && e.getMessage().contains("test timed out after")) {
+            LOGGER.error("Test class timed out but was caught by rule, description = " + description + ", ex = " + e);
+          }
+        }
+      })
+      .around(new Timeout(600000)); // millis; each test class must finish within 600s (10m)
+
+  @Rule
+  public final TestRule timeout = RuleChain
+      .outerRule(new TestWatcher() {
+        @Override
+        protected void failed(Throwable e, Description description) {
+          if (e.getMessage() != null && e.getMessage().contains("test timed out after")) {
+            LOGGER.error("Test method timed out but was caught by rule, description = " + description + ", ex = " + e);
+          }
+        }
+      })
+      .around(new Timeout(180000)); // millis; each test method must finish within 180s (3m)
+
   protected static final String SINGLE_TYPE_DATA_FILE_NAME = "kv1.dat";
   protected static final String ALL_DB1 = "server=server1->db=db_1",
       ALL_DB2 = "server=server1->db=db_2",
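
The RuleChain added above layers a TestWatcher outside a Timeout: the outer watcher observes the failure raised by the inner Timeout rule and logs it before JUnit reports the test as failed. A compact sketch of the same composition (the class name is illustrative, and System.err stands in for the logger to keep the sketch self-contained):

    import org.junit.Rule;
    import org.junit.Test;
    import org.junit.rules.RuleChain;
    import org.junit.rules.TestRule;
    import org.junit.rules.TestWatcher;
    import org.junit.rules.Timeout;
    import org.junit.runner.Description;

    public class TimeoutWatcherSketch {

      // outerRule(..) wraps around(..): the watcher sees failures thrown by the inner Timeout.
      @Rule
      public final TestRule timeout = RuleChain
          .outerRule(new TestWatcher() {
            @Override
            protected void failed(Throwable e, Description description) {
              String msg = e.getMessage();
              if (msg != null && msg.contains("test timed out after")) {
                System.err.println("Timed out: " + description + " (" + e + ")");
              }
            }
          })
          .around(new Timeout(180000)); // 3 minutes per test method

      @Test
      public void completesInTime() throws Exception {
        Thread.sleep(50);
      }
    }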

http://git-wip-us.apache.org/repos/asf/sentry/blob/66b32afa/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperations.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperations.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperations.java
deleted file mode 100644
index 77106d4..0000000
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperations.java
+++ /dev/null
@@ -1,1125 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.tests.e2e.hive;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.sentry.provider.file.PolicyFile;
-import static org.junit.Assert.assertTrue;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
-
-import com.google.common.io.Resources;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TestOperations extends AbstractTestWithStaticConfiguration {
-  private static final Logger LOGGER = LoggerFactory
-      .getLogger(TestOperations.class);
-
-  private PolicyFile policyFile;
-  final String tableName = "tb1";
-
-  static Map<String, String> privileges = new HashMap<String, String>();
-  static {
-    privileges.put("all_server", "server=server1->action=all");
-    privileges.put("create_server", "server=server1->action=create");
-    privileges.put("all_db1", "server=server1->db=" + DB1 + "->action=all");
-    privileges.put("select_db1", "server=server1->db=" + DB1 + "->action=select");
-    privileges.put("insert_db1", "server=server1->db=" + DB1 + "->action=insert");
-    privileges.put("create_db1", "server=server1->db=" + DB1 + "->action=create");
-    privileges.put("drop_db1", "server=server1->db=" + DB1 + "->action=drop");
-    privileges.put("alter_db1", "server=server1->db=" + DB1 + "->action=alter");
-    privileges.put("create_db2", "server=server1->db=" + DB2 + "->action=create");
-
-    privileges.put("all_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=all");
-    privileges.put("select_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=select");
-    privileges.put("insert_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=insert");
-    privileges.put("alter_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=alter");
-    privileges.put("alter_db1_ptab", "server=server1->db=" + DB1 + "->table=ptab->action=alter");
-    privileges.put("index_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=index");
-    privileges.put("lock_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=lock");
-    privileges.put("drop_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=drop");
-    privileges.put("insert_db2_tb2", "server=server1->db=" + DB2 + "->table=tb2->action=insert");
-    privileges.put("select_db1_view1", "server=server1->db=" + DB1 + "->table=view1->action=select");
-
-  }
-
-  @Before
-  public void setup() throws Exception{
-    policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP)
-        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
-    writePolicyFile(policyFile);
-  }
-
-  private void adminCreate(String db, String table) throws Exception{
-    adminCreate(db, table, false);
-  }
-
-  private void adminCreate(String db, String table, boolean partitioned) throws Exception{
-    Connection connection = context.createConnection(ADMIN1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("DROP DATABASE IF EXISTS " + db + " CASCADE");
-    statement.execute("CREATE DATABASE " + db);
-    if(table !=null) {
-      if (partitioned) {
-        statement.execute("CREATE table  " + db + "." + table + " (a string) PARTITIONED BY (b string)");
-      } else{
-        statement.execute("CREATE table  " + db + "." + table + " (a string)");
-      }
-
-    }
-    statement.close();
-    connection.close();
-  }
-
-  private void adminCreatePartition() throws Exception{
-    Connection connection = context.createConnection(ADMIN1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("USE " + DB1);
-    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '1') ");
-    statement.close();
-    connection.close();
-  }
-
-  /* Test all operations that require create on Server
-  1. Create database : HiveOperation.CREATEDATABASE
-   */
-  @Test
-  public void testCreateOnServer() throws Exception{
-    policyFile
-        .addPermissionsToRole("create_server", privileges.get("create_server"))
-        .addRolesToGroup(USERGROUP1, "create_server");
-
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("Create database " + DB2);
-    statement.close();
-    connection.close();
-
-    //Negative case
-    policyFile
-        .addPermissionsToRole("create_db1", privileges.get("create_db1"))
-        .addRolesToGroup(USERGROUP2, "create_db1");
-    writePolicyFile(policyFile);
-
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    context.assertSentrySemanticException(statement, "CREATE database " + DB1, semanticException);
-    statement.close();
-    connection.close();
-
-  }
-
-  @Test
-  public void testInsertInto() throws Exception{
-    File dataFile;
-    dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME);
-    FileOutputStream to = new FileOutputStream(dataFile);
-    Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to);
-    to.close();
-
-    adminCreate(DB1, null);
-    policyFile
-        .addPermissionsToRole("all_db1", privileges.get("all_db1"))
-        .addPermissionsToRole("all_uri", "server=server1->uri=file://" + dataDir)
-        .addRolesToGroup(USERGROUP1, "all_db1", "all_uri");
-
-
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("create table bar (key int)");
-    statement.execute("load data local inpath '" + dataFile.getPath() + "' into table bar");
-    statement.execute("create table foo (key int) partitioned by (part int) stored as parquet");
-    statement.execute("insert into table foo PARTITION(part=1) select key from bar");
-
-    statement.close();
-    connection.close();
-  }
-
-  /* Test all operations that require create on Database alone
-  1. Create table : HiveOperation.CREATETABLE
-  */
-  @Test
-  public void testCreateOnDatabase() throws Exception{
-    adminCreate(DB1, null);
-    policyFile
-        .addPermissionsToRole("create_db1", privileges.get("create_db1"))
-        .addPermissionsToRole("all_db1", privileges.get("all_db1"))
-        .addRolesToGroup(USERGROUP1, "create_db1")
-        .addRolesToGroup(USERGROUP2, "all_db1");
-
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("CREATE TABLE " + DB1 + ".tb2(a int)");
-    statement.close();
-    connection.close();
-
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("CREATE TABLE " + DB1 + ".tb3(a int)");
-
-    statement.close();
-    connection.close();
-
-    //Negative case
-    policyFile
-        .addPermissionsToRole("all_db1_tb1", privileges.get("select_db1"))
-        .addRolesToGroup(USERGROUP3, "all_db1_tb1");
-    writePolicyFile(policyFile);
-
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    context.assertSentrySemanticException(statement, "CREATE TABLE " + DB1 + ".tb1(a int)", semanticException);
-    statement.close();
-    connection.close();
-  }
-
-  /* Test all operations that require drop on Database alone
-  1. Drop database : HiveOperation.DROPDATABASE
-  */
-  @Test
-  public void testDropOnDatabase() throws Exception{
-    adminCreate(DB1, null);
-    policyFile
-        .addPermissionsToRole("drop_db1", privileges.get("drop_db1"))
-        .addRolesToGroup(USERGROUP1, "drop_db1");
-
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("DROP DATABASE " + DB1);
-    statement.close();
-    connection.close();
-
-    adminCreate(DB1, null);
-
-    policyFile
-        .addPermissionsToRole("all_db1", privileges.get("all_db1"))
-        .addRolesToGroup(USERGROUP2, "all_db1");
-    writePolicyFile(policyFile);
-
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("DROP DATABASE " + DB1);
-
-    statement.close();
-    connection.close();
-
-    //Negative case
-    adminCreate(DB1, null);
-    policyFile
-        .addPermissionsToRole("select_db1", privileges.get("select_db1"))
-        .addRolesToGroup(USERGROUP3, "select_db1");
-    writePolicyFile(policyFile);
-
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    context.assertSentrySemanticException(statement, "drop database " + DB1, semanticException);
-    statement.close();
-    connection.close();
-  }
-
-  /* Test all operations that require alter on Database alone
-  1. Alter database : HiveOperation.ALTERDATABASE
-   */
-  @Test
-  public void testAlterOnDatabase() throws Exception{
-    adminCreate(DB1, null);
-    policyFile
-        .addPermissionsToRole("alter_db1", privileges.get("alter_db1"))
-        .addPermissionsToRole("all_db1", privileges.get("all_db1"))
-        .addRolesToGroup(USERGROUP2, "all_db1")
-        .addRolesToGroup(USERGROUP1, "alter_db1");
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("ALTER DATABASE " + DB1 + " SET DBPROPERTIES ('comment'='comment')");
-
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("ALTER DATABASE " + DB1 + " SET DBPROPERTIES ('comment'='comment')");
-    statement.close();
-    connection.close();
-
-    //Negative case
-    adminCreate(DB1, null);
-    policyFile
-        .addPermissionsToRole("select_db1", privileges.get("select_db1"))
-        .addRolesToGroup(USERGROUP3, "select_db1");
-    writePolicyFile(policyFile);
-
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    context.assertSentrySemanticException(statement, "ALTER DATABASE " + DB1 + " SET DBPROPERTIES ('comment'='comment')", semanticException);
-    statement.close();
-    connection.close();
-  }
-
-  /* SELECT/INSERT on DATABASE
-   1. HiveOperation.DESCDATABASE
-   */
-  @Test
-  public void testDescDB() throws Exception {
-    adminCreate(DB1, tableName);
-    policyFile
-        .addPermissionsToRole("select_db1", privileges.get("select_db1"))
-        .addPermissionsToRole("insert_db1", privileges.get("insert_db1"))
-        .addRolesToGroup(USERGROUP1, "select_db1")
-        .addRolesToGroup(USERGROUP2, "insert_db1");
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("describe database " + DB1);
-    statement.close();
-    connection.close();
-
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("describe database " + DB1);
-    statement.close();
-    connection.close();
-
-    //Negative case
-    policyFile
-        .addPermissionsToRole("all_db1_tb1", privileges.get("all_db1_tb1"))
-        .addRolesToGroup(USERGROUP3, "all_db1_tb1");
-    writePolicyFile(policyFile);
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    context.assertSentrySemanticException(statement, "describe database " + DB1, semanticException);
-    statement.close();
-    connection.close();
-
-  }
-
-  private void assertSemanticException(Statement stmt, String command) throws SQLException{
-    context.assertSentrySemanticException(stmt, command, semanticException);
-  }
-
-  /*
-  1. Analyze table (HiveOperation.QUERY) : select + insert on table
-   */
-  @Test
-  public void testSelectAndInsertOnTable() throws Exception {
-    adminCreate(DB1, tableName, true);
-    adminCreatePartition();
-    policyFile
-        .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
-        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
-        .addRolesToGroup(USERGROUP1, "select_db1_tb1", "insert_db1_tb1");
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("ANALYZE TABLE tb1 PARTITION (b='1' ) COMPUTE STATISTICS");
-    statement.close();
-    connection.close();
-  }
-
-  /* Operations which require select on table alone
-  1. HiveOperation.QUERY
-  2. HiveOperation.SHOW_TBLPROPERTIES
-  3. HiveOperation.SHOW_CREATETABLE
-  4. HiveOperation.SHOWINDEXES
-  5. HiveOperation.SHOWCOLUMNS
-  6. Describe tb1 : HiveOperation.DESCTABLE
-  7. HiveOperation.SHOWPARTITIONS
-  8. TODO: show functions?
-  9. HiveOperation.SHOW_TABLESTATUS
-   */
-  @Test
-  public void testSelectOnTable() throws Exception {
-    adminCreate(DB1, tableName, true);
-    adminCreatePartition();
-    policyFile
-        .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
-        .addRolesToGroup(USERGROUP1, "select_db1_tb1");
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("select * from tb1");
-
-    statement.executeQuery("SHOW Partitions tb1");
-    statement.executeQuery("SHOW TBLPROPERTIES tb1");
-    statement.executeQuery("SHOW CREATE TABLE tb1");
-    statement.executeQuery("SHOW indexes on tb1");
-    statement.executeQuery("SHOW COLUMNS from tb1");
-    statement.executeQuery("SHOW functions '.*'");
-    statement.executeQuery("SHOW TABLE EXTENDED IN " + DB1 + " LIKE 'tb*'");
-
-    statement.executeQuery("DESCRIBE tb1");
-    statement.executeQuery("DESCRIBE tb1 PARTITION (b=1)");
-
-    statement.close();
-    connection.close();
-
-    //Negative case
-    adminCreate(DB2, tableName);
-    policyFile
-        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
-        .addRolesToGroup(USERGROUP3, "insert_db1_tb1");
-    writePolicyFile(policyFile);
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    context.assertSentrySemanticException(statement, "select * from tb1", semanticException);
-    context.assertSentrySemanticException(statement,
-        "SHOW TABLE EXTENDED IN " + DB2 + " LIKE 'tb*'", semanticException);
-
-    statement.close();
-    connection.close();
-
-
-  }
-
-  /* Operations which require insert on table alone
-  1. HiveOperation.SHOW_TBLPROPERTIES
-  2. HiveOperation.SHOW_CREATETABLE
-  3. HiveOperation.SHOWINDEXES
-  4. HiveOperation.SHOWCOLUMNS
-  5. HiveOperation.DESCTABLE
-  6. HiveOperation.SHOWPARTITIONS
-  7. TODO: show functions?
-  8. TODO: lock, unlock, Show locks
-  9. HiveOperation.SHOW_TABLESTATUS
-   */
-  @Test
-  public void testInsertOnTable() throws Exception {
-    adminCreate(DB1, tableName, true);
-    adminCreatePartition();
-    policyFile
-        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
-        .addRolesToGroup(USERGROUP1, "insert_db1_tb1");
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    /*statement.execute("LOCK TABLE tb1 EXCLUSIVE");
-    statement.execute("UNLOCK TABLE tb1");
-    */
-    statement.executeQuery("SHOW TBLPROPERTIES tb1");
-    statement.executeQuery("SHOW CREATE TABLE tb1");
-    statement.executeQuery("SHOW indexes on tb1");
-    statement.executeQuery("SHOW COLUMNS from tb1");
-    statement.executeQuery("SHOW functions '.*'");
-    //statement.executeQuery("SHOW LOCKS tb1");
-    statement.executeQuery("SHOW TABLE EXTENDED IN " + DB1 + " LIKE 'tb*'");
-
-    //NoViableAltException
-    //statement.executeQuery("SHOW transactions");
-    //statement.executeQuery("SHOW compactions");
-    statement.executeQuery("DESCRIBE tb1");
-    statement.executeQuery("DESCRIBE tb1 PARTITION (b=1)");
-    statement.executeQuery("SHOW Partitions tb1");
-
-
-    statement.close();
-    connection.close();
-  }
-
-  /* Test all operations that require alter on table
-  1. HiveOperation.ALTERTABLE_PROPERTIES
-  2. HiveOperation.ALTERTABLE_SERDEPROPERTIES
-  3. HiveOperation.ALTERTABLE_CLUSTER_SORT
-  4. HiveOperation.ALTERTABLE_TOUCH
-  5. HiveOperation.ALTERTABLE_PROTECTMODE
-  6. HiveOperation.ALTERTABLE_FILEFORMAT
-  7. HiveOperation.ALTERTABLE_RENAMEPART
-  8. HiveOperation.ALTERPARTITION_SERDEPROPERTIES
-  9. TODO: archive partition
-  10. TODO: unarchive partition
-  11. HiveOperation.ALTERPARTITION_FILEFORMAT
-  12. TODO: partition touch (is it the same as HiveOperation.ALTERTABLE_TOUCH?)
-  13. HiveOperation.ALTERPARTITION_PROTECTMODE
-  14. HiveOperation.ALTERTABLE_RENAMECOL
-  15. HiveOperation.ALTERTABLE_ADDCOLS
-  16. HiveOperation.ALTERTABLE_REPLACECOLS
-  17. TODO: HiveOperation.ALTERVIEW_PROPERTIES
-  18. TODO: HiveOperation.ALTERTABLE_SERIALIZER
-  19. TODO: HiveOperation.ALTERPARTITION_SERIALIZER
-   */
-  @Test
-  public void testAlterTable() throws Exception {
-    adminCreate(DB1, tableName, true);
-
-    Connection connection;
-    Statement statement;
-    //Setup
-    connection = context.createConnection(ADMIN1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '10') ");
-    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '1') ");
-    statement.execute("DROP TABLE IF EXISTS ptab");
-    statement.execute("CREATE TABLE ptab (a int) STORED AS PARQUET");
-
-    policyFile
-      .addPermissionsToRole("alter_db1_tb1", privileges.get("alter_db1_tb1"))
-      .addPermissionsToRole("alter_db1_ptab", privileges.get("alter_db1_ptab"))
-      .addRolesToGroup(USERGROUP1, "alter_db1_tb1", "alter_db1_ptab")
-      .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
-      .addRolesToGroup(USERGROUP2, "insert_db1_tb1");
-    writePolicyFile(policyFile);
-
-    //Negative test cases
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    assertSemanticException(statement, "ALTER TABLE tb1 SET TBLPROPERTIES ('comment' = 'new_comment')");
-    assertSemanticException(statement, "ALTER TABLE tb1 SET SERDEPROPERTIES ('field.delim' = ',')");
-    assertSemanticException(statement, "ALTER TABLE tb1 CLUSTERED BY (a) SORTED BY (a) INTO 1 BUCKETS");
-    assertSemanticException(statement, "ALTER TABLE tb1 TOUCH");
-    assertSemanticException(statement, "ALTER TABLE tb1 ENABLE NO_DROP");
-    assertSemanticException(statement, "ALTER TABLE tb1 DISABLE OFFLINE");
-    assertSemanticException(statement, "ALTER TABLE tb1 SET FILEFORMAT RCFILE");
-
-    assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) RENAME TO PARTITION (b = 2)");
-    assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) SET SERDEPROPERTIES ('field.delim' = ',')");
-    //assertSemanticException(statement, "ALTER TABLE tb1 ARCHIVE PARTITION (b = 2)");
-    //assertSemanticException(statement, "ALTER TABLE tb1 UNARCHIVE PARTITION (b = 2)");
-    assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) SET FILEFORMAT RCFILE");
-    assertSemanticException(statement, "ALTER TABLE tb1 TOUCH PARTITION (b = 10)");
-    assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) DISABLE NO_DROP");
-    assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) DISABLE OFFLINE");
-
-    assertSemanticException(statement, "ALTER TABLE tb1 CHANGE COLUMN a c int");
-    assertSemanticException(statement, "ALTER TABLE tb1 ADD COLUMNS (a int)");
-    assertSemanticException(statement, "ALTER TABLE ptab REPLACE COLUMNS (a int, c int)");
-    assertSemanticException(statement, "MSCK REPAIR TABLE tb1");
-
-    //assertSemanticException(statement, "ALTER VIEW view1 SET TBLPROPERTIES ('comment' = 'new_comment')");
-
-
-    statement.close();
-    connection.close();
-
-    //Positive cases
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("ALTER TABLE tb1 SET TBLPROPERTIES ('comment' = 'new_comment')");
-    statement.execute("ALTER TABLE tb1 SET SERDEPROPERTIES ('field.delim' = ',')");
-    statement.execute("ALTER TABLE tb1 CLUSTERED BY (a) SORTED BY (a) INTO 1 BUCKETS");
-    statement.execute("ALTER TABLE tb1 TOUCH");
-    statement.execute("ALTER TABLE tb1 ENABLE NO_DROP");
-    statement.execute("ALTER TABLE tb1 DISABLE OFFLINE");
-    statement.execute("ALTER TABLE tb1 SET FILEFORMAT RCFILE");
-
-    statement.execute("ALTER TABLE tb1 PARTITION (b = 1) RENAME TO PARTITION (b = 2)");
-    statement.execute("ALTER TABLE tb1 PARTITION (b = 2) SET SERDEPROPERTIES ('field.delim' = ',')");
-    //statement.execute("ALTER TABLE tb1 ARCHIVE PARTITION (b = 2)");
-    //statement.execute("ALTER TABLE tb1 UNARCHIVE PARTITION (b = 2)");
-    statement.execute("ALTER TABLE tb1 PARTITION (b = 2) SET FILEFORMAT RCFILE");
-    statement.execute("ALTER TABLE tb1 TOUCH PARTITION (b = 2)");
-    statement.execute("ALTER TABLE tb1 PARTITION (b = 2) DISABLE NO_DROP");
-    statement.execute("ALTER TABLE tb1 PARTITION (b = 2) DISABLE OFFLINE");
-
-    statement.execute("ALTER TABLE tb1 CHANGE COLUMN a c int");
-    statement.execute("ALTER TABLE tb1 ADD COLUMNS (a int)");
-    statement.execute("ALTER TABLE ptab REPLACE COLUMNS (a int, c int)");
-    statement.execute("MSCK REPAIR TABLE tb1");
-
-    //statement.execute("ALTER VIEW view1 SET TBLPROPERTIES ('comment' = 'new_comment')");
-
-    statement.close();
-    connection.close();
-  }
-
-  /* Test all operations that require index on table alone
-  1. Create index : HiveOperation.CREATEINDEX
-  2. Drop index : HiveOperation.DROPINDEX
-  3. HiveOperation.ALTERINDEX_REBUILD
-  4. TODO: HiveOperation.ALTERINDEX_PROPS
-  */
-  @Test
-  public void testIndexTable() throws Exception {
-    adminCreate(DB1, tableName, true);
-    policyFile
-        .addPermissionsToRole("index_db1_tb1", privileges.get("index_db1_tb1"))
-        .addRolesToGroup(USERGROUP1, "index_db1_tb1")
-        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
-        .addRolesToGroup(USERGROUP2, "insert_db1_tb1");
-    writePolicyFile(policyFile);
-
-    Connection connection;
-    Statement statement;
-
-    //Positive cases
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("CREATE INDEX table01_index ON TABLE tb1 (a) AS 'COMPACT' WITH DEFERRED REBUILD");
-    statement.execute("ALTER INDEX table01_index ON tb1 REBUILD");
-    statement.close();
-    connection.close();
-
-    //Negative case
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    assertSemanticException(statement, "CREATE INDEX table02_index ON TABLE tb1 (a) AS 'COMPACT' WITH DEFERRED REBUILD");
-    assertSemanticException(statement, "ALTER INDEX table01_index ON tb1 REBUILD");
-    assertSemanticException(statement, "DROP INDEX table01_index ON tb1");
-    statement.close();
-    connection.close();
-
-    //Positive cases
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("DROP INDEX table01_index ON tb1");
-    statement.close();
-    connection.close();
-  }
-
-  /* Test all operations that require drop on table alone
-  1. Drop table : HiveOperation.DROPTABLE
-  */
-  @Test
-  public void testDropTable() throws Exception {
-    adminCreate(DB1, tableName, true);
-    policyFile
-        .addPermissionsToRole("drop_db1_tb1", privileges.get("drop_db1_tb1"))
-        .addRolesToGroup(USERGROUP1, "drop_db1_tb1")
-        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
-        .addRolesToGroup(USERGROUP2, "insert_db1_tb1");
-    writePolicyFile(policyFile);
-
-    Connection connection;
-    Statement statement;
-
-    //Negative case
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    assertSemanticException(statement, "drop table " + tableName);
-
-    statement.close();
-    connection.close();
-
-    //Positive cases
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("drop table " + tableName);
-
-    statement.close();
-    connection.close();
-  }
-
-  @Ignore
-  @Test
-  public void testLockTable() throws Exception {
-   //TODO
-  }
-
-  /* Operations that require alter + drop on table
-    1. HiveOperation.ALTERTABLE_DROPPARTS
-  */
-  @Test
-  public void dropPartition() throws Exception {
-    adminCreate(DB1, tableName, true);
-    policyFile
-        .addPermissionsToRole("alter_db1_tb1", privileges.get("alter_db1_tb1"))
-        .addPermissionsToRole("drop_db1_tb1", privileges.get("drop_db1_tb1"))
-        .addRolesToGroup(USERGROUP1, "alter_db1_tb1", "drop_db1_tb1")
-        .addRolesToGroup(USERGROUP2, "alter_db1_tb1");
-
-    writePolicyFile(policyFile);
-
-    Connection connection;
-    Statement statement;
-    //Setup
-    connection = context.createConnection(ADMIN1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '10') ");
-
-    //Negative case
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("USE " + DB1);
-    assertSemanticException(statement, "ALTER TABLE tb1 DROP PARTITION (b = 10)");
-
-    //Positive case
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("ALTER TABLE tb1 DROP PARTITION (b = 10)");
-    statement.close();
-    connection.close();
-  }
-
-  /*
-   1. HiveOperation.ALTERTABLE_RENAME
-   */
-  @Test
-  public void renameTable() throws Exception {
-    adminCreate(DB1, tableName);
-    policyFile
-        .addPermissionsToRole("alter_db1_tb1", privileges.get("alter_db1_tb1"))
-        .addPermissionsToRole("create_db1", privileges.get("create_db1"))
-        .addRolesToGroup(USERGROUP1, "alter_db1_tb1", "create_db1")
-        .addRolesToGroup(USERGROUP2, "create_db1")
-        .addRolesToGroup(USERGROUP3, "alter_db1_tb1");
-
-    writePolicyFile(policyFile);
-
-    Connection connection;
-    Statement statement;
-
-    //Negative cases
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    assertSemanticException(statement, "ALTER TABLE tb1 RENAME TO tb2");
-    statement.close();
-    connection.close();
-
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    assertSemanticException(statement, "ALTER TABLE tb1 RENAME TO tb2");
-    statement.close();
-    connection.close();
-
-    //Positive case
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("ALTER TABLE tb1 RENAME TO tb2");
-    statement.close();
-    connection.close();
-  }
-
-  /* Test all operations which require alter on table (+ all on URI)
-   1. HiveOperation.ALTERTABLE_LOCATION
-   2. HiveOperation.ALTERTABLE_ADDPARTS
-   3. TODO: HiveOperation.ALTERPARTITION_LOCATION
-   4. TODO: HiveOperation.ALTERTBLPART_SKEWED_LOCATION
-   */
-  @Test
-  public void testAlterOnTableAndURI() throws Exception {
-    adminCreate(DB1, tableName, true);
-    String tabLocation = dfs.getBaseDir() + "/" + Math.random();
-    policyFile
-        .addPermissionsToRole("alter_db1_tb1", privileges.get("alter_db1_tb1"))
-        .addPermissionsToRole("all_uri", "server=server1->uri=" + tabLocation)
-        .addRolesToGroup(USERGROUP1, "alter_db1_tb1", "all_uri")
-        .addRolesToGroup(USERGROUP2, "alter_db1_tb1");
-
-    writePolicyFile(policyFile);
-
-    //Case without URI
-    Connection connection = context.createConnection(USER2_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("USE " + DB1);
-    assertSemanticException(statement, "ALTER TABLE tb1 SET LOCATION '" + tabLocation + "'");
-    assertSemanticException(statement, "ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '3') LOCATION '" + tabLocation + "/part'");
-    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '1') ");
-
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("ALTER TABLE tb1 SET LOCATION '" + tabLocation + "'");
-    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '3') LOCATION '" + tabLocation + "/part'");
-    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '10') ");
-    statement.close();
-    connection.close();
-
-    //Negative case: User2_1 has privileges on table but not on the URI
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    context.assertSentrySemanticException(statement, "ALTER TABLE tb1 SET LOCATION '" + tabLocation + "'",
-        semanticException);
-    context.assertSentrySemanticException(statement,
-        "ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '3') LOCATION '" + tabLocation + "/part'",
-        semanticException);
-    statement.close();
-    connection.close();
-
-    //Negative case: User3_1 has only insert privileges on table
-    policyFile
-        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
-        .addRolesToGroup(USERGROUP3, "insert_db1_tb1", "all_uri");
-    writePolicyFile(policyFile);
-
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    assertSemanticException(statement, "ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '2') ");
-    assertSemanticException(statement, "ALTER TABLE tb1 SET LOCATION '" + tabLocation + "'");
-
-    assertSemanticException(statement, "ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '3') LOCATION '"
-        + tabLocation + "/part'");
-    statement.close();
-    connection.close();
-
-
-  }
-
-  /* Create on Database and select on table
-  1. Create view :  HiveOperation.CREATEVIEW
-   */
-  @Test
-  public void testCreateView() throws Exception {
-    adminCreate(DB1, tableName);
-    adminCreate(DB2, null);
-    policyFile
-        .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
-        .addPermissionsToRole("create_db2", privileges.get("create_db2"))
-        .addRolesToGroup(USERGROUP1, "select_db1_tb1", "create_db2");
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("use " + DB2);
-    statement.execute("create view view1 as select a from " + DB1 + ".tb1");
-    statement.close();
-    connection.close();
-
-    //Negative case
-    policyFile
-        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
-        .addRolesToGroup(USERGROUP3, "insert_db1_tb1", "create_db2");
-    writePolicyFile(policyFile);
-
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB2);
-    context.assertSentrySemanticException(statement, "create view view1 as select a from " + DB1 + ".tb1",
-        semanticException);
-    statement.close();
-    connection.close();
-
-
-  }
-
-  /*
-   1. HiveOperation.IMPORT : Create on db + all on URI
-   2. HiveOperation.EXPORT : SELECT on table + all on uri
-   */
-
-  @Test
-  public void testExportImport() throws Exception {
-    File dataFile;
-    dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME);
-    FileOutputStream to = new FileOutputStream(dataFile);
-    Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to);
-    to.close();
-
-    dropDb(ADMIN1, DB1);
-    createDb(ADMIN1, DB1);
-    createTable(ADMIN1, DB1, dataFile, tableName);
-    String location = dfs.getBaseDir() + "/" + Math.random();
-    policyFile
-        .addPermissionsToRole("create_db1", privileges.get("create_db1"))
-        .addPermissionsToRole("all_uri", "server=server1->uri="+ location)
-        .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
-        .addPermissionsToRole("insert_db1", privileges.get("insert_db1"))
-        .addRolesToGroup(USERGROUP1, "select_db1_tb1", "all_uri")
-        .addRolesToGroup(USERGROUP2, "create_db1", "all_uri")
-        .addRolesToGroup(USERGROUP3, "insert_db1", "all_uri");
-    writePolicyFile(policyFile);
-    Connection connection;
-    Statement statement;
-
-    //Negative case: insert on the database is not sufficient for EXPORT
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    context.assertSentrySemanticException(statement, "export table tb1 to '" + location + "'",
-        semanticException);
-    statement.close();
-    connection.close();
-
-    //Positive: select on the table plus all on the URI allows EXPORT
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("export table tb1 to '" + location + "'" );
-    statement.close();
-    connection.close();
-
-    //Negative: insert on the database is not sufficient for IMPORT (create is required)
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    context.assertSentrySemanticException(statement, "import table tb2 from '" + location + "'",
-        semanticException);
-    statement.close();
-    connection.close();
-
-    //Positive: create on the database plus all on the URI allows IMPORT
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("import table tb2 from '" + location + "'");
-    statement.close();
-    connection.close();
-
-  }
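-
-  /*
-   Editorial sketch, not used by the tests above: the connect / USE db / execute /
-   close sequence is repeated in every positive and negative case in this class.
-   With try-with-resources it could be collapsed into a small helper like this,
-   assuming context.createConnection and context.createStatement behave as above.
-   */
-  private void runAsUser(String user, String db, String... queries) throws Exception {
-    try (Connection connection = context.createConnection(user);
-        Statement statement = context.createStatement(connection)) {
-      statement.execute("Use " + db);
-      for (String query : queries) {
-        statement.execute(query);
-      }
-    }
-  }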
-
-  /*
-  1. HiveOperation.LOAD: INSERT on table + all on uri
-   */
-  @Test
-  public void testLoad() throws Exception {
-    File dataFile;
-    dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME);
-    FileOutputStream to = new FileOutputStream(dataFile);
-    Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to);
-    to.close();
-
-    adminCreate(DB1, tableName);
-
-    policyFile
-        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
-        .addPermissionsToRole("all_uri", "server=server1->uri=file://" + dataDir)
-        .addRolesToGroup(USERGROUP1, "insert_db1_tb1", "all_uri");
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("load data local inpath '" + dataFile.getPath() + "' into table tb1" );
-    statement.close();
-    connection.close();
-  }
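-
-  /*
-   Sketch (editorial addition, not part of the original class): the matching
-   negative case for HiveOperation.LOAD. With only the table-level INSERT
-   privilege and no URI privilege on file://dataDir, the same LOAD statement
-   should be rejected.
-   */
-  @Test
-  public void testLoadWithoutUriPrivilegeSketch() throws Exception {
-    File dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME);
-    FileOutputStream to = new FileOutputStream(dataFile);
-    Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to);
-    to.close();
-
-    adminCreate(DB1, tableName);
-    policyFile
-        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
-        .addRolesToGroup(USERGROUP1, "insert_db1_tb1");
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    assertSemanticException(statement,
-        "load data local inpath '" + dataFile.getPath() + "' into table tb1");
-    statement.close();
-    connection.close();
-  }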
-
-  /*
-  1. HiveOperation.CREATETABLE_AS_SELECT : Create on db + select on table (+ all on URI when a LOCATION is specified)
-   */
-  @Test
-  public void testCTAS() throws Exception {
-    adminCreate(DB1, tableName);
-    adminCreate(DB2, null);
-
-    String location = dfs.getBaseDir() + "/" + Math.random();
-
-    Connection connection = context.createConnection(ADMIN1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("create view view1 as select a from " + DB1 + ".tb1");
-    statement.close();
-    connection.close();
-
-    policyFile
-      .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
-      .addPermissionsToRole("select_db1_view1", privileges.get("select_db1_view1"))
-      .addPermissionsToRole("create_db2", privileges.get("create_db2"))
-      .addPermissionsToRole("all_uri", "server=server1->uri=" + location)
-      .addRolesToGroup(USERGROUP1, "select_db1_tb1", "create_db2")
-      .addRolesToGroup(USERGROUP2, "select_db1_view1", "create_db2")
-      .addRolesToGroup(USERGROUP3, "select_db1_tb1", "create_db2,all_uri");
-    writePolicyFile(policyFile);
-
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB2);
-    statement.execute("create table tb2 as select a from " + DB1 + ".tb1");
-    //Ensure CTAS with an explicit location fails without the URI privilege,
-    //and CTAS from the view fails without select on the view
-    context.assertSentrySemanticException(statement, "create table tb3 location '" + location +
-        "' as select a from " + DB1 + ".tb1",
-      semanticException);
-    context.assertSentrySemanticException(statement, "create table tb3 as select a from " + DB1 + ".view1",
-      semanticException);
-
-    statement.close();
-    connection.close();
-
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB2);
-    statement.execute("create table tb3 as select a from " + DB1 + ".view1" );
-    context.assertSentrySemanticException(statement, "create table tb4 as select a from " + DB1 + ".tb1",
-      semanticException);
-
-    statement.close();
-    connection.close();
-
-    connection = context.createConnection(USER3_1);
-    statement = context.createStatement(connection);
-    //CTAS with an explicit location is valid when the group also holds the URI privilege
-    statement.execute("Use " + DB2);
-    statement.execute("create table tb4 location '" + location +
-      "' as select a from " + DB1 + ".tb1");
-
-    statement.close();
-    connection.close();
-
-  }
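-
-  /*
-   Sketch (editorial addition, not part of the original test): USERGROUP2 above can
-   read the view and create in DB2, but it does not hold all_uri, so a CTAS that
-   also specifies a LOCATION should still be rejected. The table name tb5 is
-   illustrative.
-   */
-  @Test
-  public void testCTASWithLocationWithoutUriSketch() throws Exception {
-    adminCreate(DB1, tableName);
-    adminCreate(DB2, null);
-    String location = dfs.getBaseDir() + "/" + Math.random();
-
-    Connection connection = context.createConnection(ADMIN1);
-    Statement statement = context.createStatement(connection);
-    statement.execute("Use " + DB1);
-    statement.execute("create view view1 as select a from " + DB1 + ".tb1");
-    statement.close();
-    connection.close();
-
-    policyFile
-        .addPermissionsToRole("select_db1_view1", privileges.get("select_db1_view1"))
-        .addPermissionsToRole("create_db2", privileges.get("create_db2"))
-        .addRolesToGroup(USERGROUP2, "select_db1_view1", "create_db2");
-    writePolicyFile(policyFile);
-
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("Use " + DB2);
-    assertSemanticException(statement, "create table tb5 location '" + location
-        + "' as select a from " + DB1 + ".view1");
-    statement.close();
-    connection.close();
-  }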
-
-  /*
-  1. INSERT : input: select on the source table; output: insert on the target table, or all on the URI when writing to a directory
-   */
-  @Test
-  public void testInsert() throws Exception {
-    File dataFile;
-    dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME);
-    FileOutputStream to = new FileOutputStream(dataFile);
-    Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to);
-    to.close();
-
-    dropDb(ADMIN1, DB1);
-    dropDb(ADMIN1, DB2);
-    createDb(ADMIN1, DB1);
-    createDb(ADMIN1, DB2);
-    createTable(ADMIN1, DB1, dataFile, tableName);
-    createTable(ADMIN1, DB2, null, "tb2");
-    String location = dfs.getBaseDir() + "/" + Math.random();
-
-    policyFile
-        .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
-        .addPermissionsToRole("insert_db2_tb2", privileges.get("insert_db2_tb2"))
-        .addRolesToGroup(USERGROUP1, "select_db1_tb1", "insert_db2_tb2")
-        .addPermissionsToRole("all_uri", "server=server1->uri=" + location)
-        .addRolesToGroup(USERGROUP2, "select_db1_tb1", "all_uri");
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER1_1);
-    Statement statement = context.createStatement(connection);
-    assertSemanticException(statement, "insert overwrite directory '" + location + "' select * from " + DB1 + ".tb1");
-    statement.execute("insert overwrite table " + DB2 + ".tb2 select * from " + DB1 + ".tb1");
-    statement.close();
-    connection.close();
-
-    connection = context.createConnection(USER2_1);
-    statement = context.createStatement(connection);
-    statement.execute("insert overwrite directory '" + location + "' select * from " + DB1 + ".tb1" );
-    assertSemanticException(statement, "insert overwrite table " + DB2 + ".tb2 select * from " + DB1 + ".tb1");
-    statement.close();
-    connection.close();
-  }
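-
-  /*
-   Editorial sketch, not part of the original test: a read-back check that could be
-   appended to testInsert to confirm rows were actually written. ADMIN1 is used so
-   the check does not need an extra select privilege on DB2.tb2.
-   */
-  private void verifyRowsWritten() throws Exception {
-    Connection connection = context.createConnection(ADMIN1);
-    Statement statement = context.createStatement(connection);
-    java.sql.ResultSet res = statement.executeQuery("select count(*) from " + DB2 + ".tb2");
-    assertTrue("expected " + DB2 + ".tb2 to contain rows after insert overwrite",
-        res.next() && res.getLong(1) > 0);
-    res.close();
-    statement.close();
-    connection.close();
-  }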
-
-  @Test
-  public void testFullyQualifiedTableName() throws Exception{
-    Connection connection;
-    Statement statement;
-    connection = context.createConnection(ADMIN1);
-    statement = context.createStatement(connection);
-    statement.execute("create database " + DB1);
-    statement.execute("create table " + DB1 + ".tb1(a int)");
-    statement.execute("DROP table " + DB1 + ".tb1");
-    statement.execute("create table " + DB1 + ".tb1(a int)");
-    statement.execute("use " + DB1);
-    statement.execute("drop table tb1");
-  }
-
-  @Test
-  public void testExternalTables() throws Exception{
-    createDb(ADMIN1, DB1);
-    File externalTblDir = new File(dataDir, "exttab");
-    assertTrue("Unable to create directory for external table test" , externalTblDir.mkdir());
-
-    policyFile
-        .addPermissionsToRole("create_db1", privileges.get("create_db1"))
-        .addPermissionsToRole("all_uri", "server=server1->uri=file://" + dataDir.getPath())
-        .addRolesToGroup(USERGROUP1, "create_db1", "all_uri")
-        .addRolesToGroup(USERGROUP2, "create_db1");
-    writePolicyFile(policyFile);
-
-    Connection connection = context.createConnection(USER2_1);
-    Statement statement = context.createStatement(connection);
-    assertSemanticException(statement, "create external table " + DB1 + ".tb1(a int) stored as " +
-        "textfile location 'file:" + externalTblDir.getAbsolutePath() + "'");
-    //Negative case: create external table on an HDFS path, still without a URI privilege
-    assertSemanticException(statement, "create external table " + DB1 + ".tb2(a int) location '/user/hive/warehouse/blah'");
-    statement.close();
-    connection.close();
-
-    connection = context.createConnection(USER1_1);
-    statement = context.createStatement(connection);
-    statement.execute("create external table " + DB1 + ".tb1(a int) stored as " +
-        "textfile location 'file:" + externalTblDir.getAbsolutePath() + "'");
-    statement.close();
-    connection.close();
-
-  }
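-
-  /*
-   For reference (editorial note, not in the original class): writePolicyFile
-   serializes the PolicyFile builder into Sentry's file-based provider format,
-   roughly along these lines. The group and role names below are illustrative;
-   the privilege strings mirror the ones passed to addPermissionsToRole above.
-
-     [groups]
-     usergroup1 = create_db1, all_uri
-     [roles]
-     all_uri = server=server1->uri=file:///path/to/dataDir
-   */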
-
-  @Test
-  public void testCaseSensitivity() throws Exception {
-    Statement statement = null;
-    Connection connection = null;
-    try {
-      createDb(ADMIN1, DB1);
-      String scratchLikeDir = context.getProperty(HiveConf.ConfVars.SCRATCHDIR.varname);
-      LOGGER.info("scratch like dir = " + scratchLikeDir);
-      String extParentDir = scratchLikeDir + "/ABC/hhh";
-      String extTableDir = scratchLikeDir + "/abc/hhh";
-      LOGGER.info("Creating extParentDir = " + extParentDir + ", extTableDir = " + extTableDir);
-      dfs.assertCreateDir(extParentDir);
-      dfs.assertCreateDir(extTableDir);
-
-      if (! (extParentDir.toLowerCase().startsWith("hdfs://")
-          || extParentDir.toLowerCase().startsWith("s3://")
-          || extParentDir.contains("://"))) {
-        String scheme = fileSystem.getUri().toString();
-        LOGGER.info("scheme = " + scheme);
-        extParentDir = scheme + extParentDir;
-        extTableDir = scheme + extTableDir;
-        LOGGER.info("Add scheme in extParentDir = " + extParentDir + ", extTableDir = " + extTableDir);
-      }
-
-      policyFile
-          .addPermissionsToRole("all_db1", privileges.get("all_db1"))
-          .addPermissionsToRole("all_uri", "server=server1->uri=" + extParentDir)
-          .addRolesToGroup(USERGROUP1, "all_db1", "all_uri");
-      writePolicyFile(policyFile);
-      connection = context.createConnection(USER1_1);
-      statement = context.createStatement(connection);
-      assertSemanticException(statement,
-          "create external table " + DB1 + ".tb1(a int) location '" + extTableDir + "'");
-    } finally {
-      if (statement != null) {
-        statement.close();
-      }
-      if (connection != null) {
-        connection.close();
-      }
-    }
-  }
-}