Posted to commits@sentry.apache.org by sh...@apache.org on 2013/09/21 01:38:58 UTC

[9/9] git commit: SENTRY-16: Move sentry-tests to sentry-tests-hive package (Gregory Chanan via Shreepadma Venugopalan)

SENTRY-16: Move sentry-tests to sentry-tests-hive package (Gregory Chanan via Shreepadma Venugopalan)
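
In effect, sentry-tests becomes a plain aggregator POM (packaging "pom") whose only child module is the new sentry-tests-hive, which now carries the Hive end-to-end tests, the hiveserver test helpers, and the test resources; the classes move from org.apache.sentry.tests.e2e to org.apache.sentry.tests.e2e.hive. Assuming a standard Maven reactor build (the command below is illustrative, not part of this commit), the relocated suite can be run with: mvn test -pl sentry-tests/sentry-tests-hive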


Project: http://git-wip-us.apache.org/repos/asf/incubator-sentry/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-sentry/commit/aef404c6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-sentry/tree/aef404c6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-sentry/diff/aef404c6

Branch: refs/heads/master
Commit: aef404c6091785cd1e8879e7279b9eb45678c5d5
Parents: 3c232e1
Author: Shreepadma Venugopalan <sh...@apache.org>
Authored: Fri Sep 20 16:38:07 2013 -0700
Committer: Shreepadma Venugopalan <sh...@apache.org>
Committed: Fri Sep 20 16:38:07 2013 -0700

----------------------------------------------------------------------
 .../test/resources/sentry-deprecated-site.xml   |  53 ++
 sentry-tests/pom.xml                            | 312 +--------
 sentry-tests/sentry-tests-hive/pom.xml          | 334 +++++++++
 .../e2e/hive/AbstractTestWithHiveServer.java    |  85 +++
 .../AbstractTestWithStaticConfiguration.java    | 157 +++++
 .../e2e/hive/AbstractTestWithStaticDFS.java     |  69 ++
 .../e2e/hive/AbstractTestWithStaticLocalFS.java |  37 +
 .../apache/sentry/tests/e2e/hive/Context.java   | 210 ++++++
 .../e2e/hive/DummySentryOnFailureHook.java      |  32 +
 .../sentry/tests/e2e/hive/PolicyFileEditor.java |  78 +++
 .../sentry/tests/e2e/hive/TestCrossDbOps.java   | 691 +++++++++++++++++++
 .../sentry/tests/e2e/hive/TestEndToEnd.java     | 143 ++++
 .../e2e/hive/TestExportImportPrivileges.java    | 154 +++++
 .../e2e/hive/TestMetadataObjectRetrieval.java   | 453 ++++++++++++
 .../tests/e2e/hive/TestMetadataPermissions.java | 158 +++++
 .../tests/e2e/hive/TestMovingToProduction.java  | 231 +++++++
 .../tests/e2e/hive/TestPerDBConfiguration.java  | 486 +++++++++++++
 .../e2e/hive/TestPerDatabasePolicyFile.java     | 134 ++++
 .../e2e/hive/TestPrivilegeAtTransform.java      | 118 ++++
 .../e2e/hive/TestPrivilegesAtDatabaseScope.java | 464 +++++++++++++
 .../e2e/hive/TestPrivilegesAtFunctionScope.java | 177 +++++
 .../e2e/hive/TestPrivilegesAtTableScope.java    | 678 ++++++++++++++++++
 .../e2e/hive/TestRuntimeMetadataRetrieval.java  | 401 +++++++++++
 .../sentry/tests/e2e/hive/TestSandboxOps.java   | 585 ++++++++++++++++
 .../hive/TestSentryOnFailureHookLoading.java    | 129 ++++
 .../tests/e2e/hive/TestServerConfiguration.java | 209 ++++++
 .../tests/e2e/hive/TestUriPermissions.java      | 270 ++++++++
 .../tests/e2e/hive/TestUserManagement.java      | 333 +++++++++
 .../e2e/hive/hiveserver/AbstractHiveServer.java |  88 +++
 .../e2e/hive/hiveserver/EmbeddedHiveServer.java |  59 ++
 .../e2e/hive/hiveserver/ExternalHiveServer.java | 124 ++++
 .../tests/e2e/hive/hiveserver/HiveServer.java   |  34 +
 .../e2e/hive/hiveserver/HiveServerFactory.java  | 212 ++++++
 .../e2e/hive/hiveserver/InternalHiveServer.java |  55 ++
 .../hive/hiveserver/UnmanagedHiveServer.java    |  96 +++
 .../src/test/resources/emp.dat                  |  12 +
 .../src/test/resources/hive-site.xml            |  45 ++
 .../src/test/resources/kv1.dat                  | 500 ++++++++++++++
 .../src/test/resources/log4j.properties         |  35 +
 .../src/test/resources/sentry-site.xml          |  33 +
 .../src/test/resources/test-authz-provider.ini  |  25 +
 .../tests/e2e/AbstractTestWithHiveServer.java   |  85 ---
 .../AbstractTestWithStaticConfiguration.java    | 157 -----
 .../tests/e2e/AbstractTestWithStaticDFS.java    |  69 --
 .../e2e/AbstractTestWithStaticLocalFS.java      |  37 -
 .../org/apache/sentry/tests/e2e/Context.java    | 210 ------
 .../tests/e2e/DummySentryOnFailureHook.java     |  32 -
 .../sentry/tests/e2e/PolicyFileEditor.java      |  78 ---
 .../apache/sentry/tests/e2e/TestCrossDbOps.java | 691 -------------------
 .../apache/sentry/tests/e2e/TestEndToEnd.java   | 143 ----
 .../tests/e2e/TestExportImportPrivileges.java   | 154 -----
 .../tests/e2e/TestMetadataObjectRetrieval.java  | 453 ------------
 .../tests/e2e/TestMetadataPermissions.java      | 158 -----
 .../tests/e2e/TestMovingToProduction.java       | 231 -------
 .../tests/e2e/TestPerDBConfiguration.java       | 486 -------------
 .../tests/e2e/TestPerDatabasePolicyFile.java    | 134 ----
 .../tests/e2e/TestPrivilegeAtTransform.java     | 118 ----
 .../e2e/TestPrivilegesAtDatabaseScope.java      | 464 -------------
 .../e2e/TestPrivilegesAtFunctionScope.java      | 177 -----
 .../tests/e2e/TestPrivilegesAtTableScope.java   | 678 ------------------
 .../tests/e2e/TestRuntimeMetadataRetrieval.java | 401 -----------
 .../apache/sentry/tests/e2e/TestSandboxOps.java | 585 ----------------
 .../e2e/TestSentryOnFailureHookLoading.java     | 129 ----
 .../tests/e2e/TestServerConfiguration.java      | 209 ------
 .../sentry/tests/e2e/TestUriPermissions.java    | 270 --------
 .../sentry/tests/e2e/TestUserManagement.java    | 333 ---------
 .../e2e/hiveserver/AbstractHiveServer.java      |  88 ---
 .../e2e/hiveserver/EmbeddedHiveServer.java      |  59 --
 .../e2e/hiveserver/ExternalHiveServer.java      | 124 ----
 .../sentry/tests/e2e/hiveserver/HiveServer.java |  34 -
 .../tests/e2e/hiveserver/HiveServerFactory.java | 212 ------
 .../e2e/hiveserver/InternalHiveServer.java      |  55 --
 .../e2e/hiveserver/UnmanagedHiveServer.java     |  96 ---
 sentry-tests/src/test/resources/access-site.xml |  33 -
 sentry-tests/src/test/resources/emp.dat         |  12 -
 sentry-tests/src/test/resources/hive-site.xml   |  45 --
 sentry-tests/src/test/resources/kv1.dat         | 500 --------------
 .../src/test/resources/log4j.properties         |  35 -
 sentry-tests/src/test/resources/sentry-site.xml |  33 -
 .../src/test/resources/test-authz-provider.ini  |  25 -
 80 files changed, 8193 insertions(+), 8139 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/aef404c6/sentry-binding/sentry-binding-hive/src/test/resources/sentry-deprecated-site.xml
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/test/resources/sentry-deprecated-site.xml b/sentry-binding/sentry-binding-hive/src/test/resources/sentry-deprecated-site.xml
new file mode 100644
index 0000000..e5c5a36
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive/src/test/resources/sentry-deprecated-site.xml
@@ -0,0 +1,53 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<configuration>
+  <property>
+    <name>hive.sentry.provider</name>
+    <value>deprecated</value>
+  </property>
+  <property>
+    <name>hive.sentry.provider.resource</name>
+    <value>deprecated</value>
+  </property>
+  <property>
+    <name>hive.sentry.server</name>
+    <value>deprecated</value>
+  </property>
+  <property>
+    <name>hive.sentry.restrict.defaultDB</name>
+    <value>deprecated</value>
+  </property>
+  <property>
+    <name>hive.sentry.testing.mode</name>
+    <value>deprecated</value>
+  </property>
+  <property>
+    <name>hive.sentry.udf.whitelist</name>
+    <value>deprecated</value>
+  </property>
+  <property>
+    <name>hive.sentry.allow.hive.impersonation</name>
+    <value>deprecated</value>
+  </property>
+  <property>
+    <name>hive.sentry.failure.hooks</name>
+    <value>deprecated</value>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/aef404c6/sentry-tests/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-tests/pom.xml b/sentry-tests/pom.xml
index 5536348..1b3c58d 100644
--- a/sentry-tests/pom.xml
+++ b/sentry-tests/pom.xml
@@ -25,310 +25,10 @@ limitations under the License.
   <artifactId>sentry-tests</artifactId>
   <name>Sentry Tests</name>
   <description>end to end tests for sentry project</description>
-  <properties>
-    <hadoop-dist></hadoop-dist>
-    <hive-dist>${hadoop-dist}</hive-dist>
-  </properties>
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.thrift</groupId>
-      <artifactId>libthrift</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.derby</groupId>
-      <artifactId>derby</artifactId>
-    </dependency>
-    <dependency>
-     <groupId>org.apache.thrift</groupId>
-      <artifactId>libthrift</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.derby</groupId>
-      <artifactId>derby</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-core</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-service</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-shims</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-serde</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-metastore</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-pdk</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-hwi</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-jdbc</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-hbase-handler</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-exec</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-contrib</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-common</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-cli</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-builtins</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-anttasks</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-common</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-annotations</artifactId>
-      <version>${hadoop.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-auth</artifactId>
-      <version>${hadoop.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-hdfs</artifactId>
-      <version>${hadoop.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-mapreduce-client-common</artifactId>
-      <version>${hadoop.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-mapreduce-client-core</artifactId>
-      <version>${hadoop.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-      <version>${hadoop.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-mapreduce-client-shuffle</artifactId>
-      <version>${hadoop.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-yarn-api</artifactId>
-      <version>${hadoop.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-yarn-common</artifactId>
-      <version>${hadoop.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-yarn-server-common</artifactId>
-      <version>${hadoop.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-yarn-server-nodemanager</artifactId>
-      <version>${hadoop.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.zookeeper</groupId>
-      <artifactId>zookeeper</artifactId>
-      <version>${zookeeper.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.easytesting</groupId>
-      <artifactId>fest-reflect</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-binding-hive</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.shiro</groupId>
-      <artifactId>shiro-core</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-provider-file</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-minicluster</artifactId>
-      <scope>test</scope>
-    </dependency>
-  </dependencies>
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <configuration>
-          <systemPropertyVariables>
-            <buildDirectory>${project.build.directory}</buildDirectory>
-          </systemPropertyVariables>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <profiles>
-   <profile>
-     <id>download-hadoop</id>
-     <build>
-      <plugins>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-antrun-plugin</artifactId>
-          <configuration>
-            <skipTests>false</skipTests>
-          </configuration>
-          <executions>
-            <execution>
-              <id>download-hadoop</id>
-              <phase>generate-sources</phase>
-              <goals>
-                <goal>run</goal>
-              </goals>
-              <configuration>
-                <target>
-                  <echo file="target/download.sh">
-                    set -e
-                    set -x
-                    /bin/pwd
-                    BASE_DIR=./target
-                    DOWNLOAD_DIR=$BASE_DIR/downloads
-                    download() {
-                      url=$1;
-                      tarName=$2
-                      finalName=$3
-                      rm -rf $BASE_DIR/$finalName
-                      wget -nv -O $DOWNLOAD_DIR/$tarName $url
-                      tar -zxf $DOWNLOAD_DIR/$tarName -C $BASE_DIR
-                      rm $DOWNLOAD_DIR/$tarName
-                      mv $BASE_DIR/${finalName}* $BASE_DIR/$finalName
-                    }
-                    rm -rf $DOWNLOAD_DIR
-                    mkdir -p $DOWNLOAD_DIR
-                    download "http://archive.cloudera.com/cdh4/cdh/4/hadoop-latest.tar.gz" hadoop.tar.gz hadoop
-                    download "http://archive.cloudera.com/cdh4/cdh/4/hive-latest.tar.gz" hive.tar.gz hive
-                  </echo>
-                  <exec executable="sh" dir="${basedir}" failonerror="true">
-                    <arg line="target/download.sh"/>
-                  </exec>
-                </target>
-              </configuration>
-            </execution>
-          </executions>
-        </plugin>
-      </plugins>
-     </build>
-   </profile>
-   <profile>
-     <id>link-hadoop</id>
-     <build>
-      <plugins>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-antrun-plugin</artifactId>
-          <configuration>
-            <skipTests>false</skipTests>
-          </configuration>
-          <executions>
-            <execution>
-              <id>link-hadoop</id>
-              <phase>generate-sources</phase>
-              <goals>
-                <goal>run</goal>
-              </goals>
-              <configuration>
-                <target>
-                  <echo file="target/link_dist.sh">
-                    set -e
-                    set -x
-                    /bin/pwd
-                    BASE_DIR=./target
-                    rm -f $BASE_DIR/hadoop
-                    ln -s $1/hadoop $BASE_DIR/.
-                    rm -f $BASE_DIR/hive
-                    ln -s $2/hive $BASE_DIR/.
-                  </echo>
-                  <exec executable="sh" dir="${basedir}" failonerror="true">
-                    <arg line="target/link_dist.sh ${hadoop-dist} ${hive-dist}"/>
-                  </exec>
-                </target>
-              </configuration>
-            </execution>
-          </executions>
-        </plugin>
-      </plugins>
-     </build>
-   </profile>
-  </profiles>
-</project>
+  <packaging>pom</packaging>
+
+  <modules>
+    <module>sentry-tests-hive</module>
+  </modules>
 
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/aef404c6/sentry-tests/sentry-tests-hive/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/pom.xml b/sentry-tests/sentry-tests-hive/pom.xml
new file mode 100644
index 0000000..27b45c0
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/pom.xml
@@ -0,0 +1,334 @@
+<?xml version="1.0"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.sentry</groupId>
+    <artifactId>sentry-tests</artifactId>
+    <version>1.3.0-incubating-SNAPSHOT</version>
+  </parent>
+  <artifactId>sentry-tests-hive</artifactId>
+  <name>Sentry Hive Tests</name>
+  <description>end to end tests for sentry-hive integration</description>
+  <properties>
+    <hadoop-dist></hadoop-dist>
+    <hive-dist>${hadoop-dist}</hive-dist>
+  </properties>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.thrift</groupId>
+      <artifactId>libthrift</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.derby</groupId>
+      <artifactId>derby</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.thrift</groupId>
+      <artifactId>libthrift</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.derby</groupId>
+      <artifactId>derby</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-core</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-service</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-shims</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-serde</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-pdk</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-hwi</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-jdbc</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-hbase-handler</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-contrib</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-common</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-cli</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-builtins</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-anttasks</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-annotations</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-auth</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-common</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-shuffle</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-api</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-common</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-common</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-nodemanager</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.zookeeper</groupId>
+      <artifactId>zookeeper</artifactId>
+      <version>${zookeeper.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.easytesting</groupId>
+      <artifactId>fest-reflect</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-binding-hive</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.shiro</groupId>
+      <artifactId>shiro-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-provider-file</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minicluster</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <systemPropertyVariables>
+            <buildDirectory>${project.build.directory}</buildDirectory>
+          </systemPropertyVariables>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <profiles>
+   <profile>
+     <id>download-hadoop</id>
+     <build>
+      <plugins>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-antrun-plugin</artifactId>
+          <configuration>
+            <skipTests>false</skipTests>
+          </configuration>
+          <executions>
+            <execution>
+              <id>download-hadoop</id>
+              <phase>generate-sources</phase>
+              <goals>
+                <goal>run</goal>
+              </goals>
+              <configuration>
+                <target>
+                  <echo file="target/download.sh">
+                    set -e
+                    set -x
+                    /bin/pwd
+                    BASE_DIR=./target
+                    DOWNLOAD_DIR=$BASE_DIR/downloads
+                    download() {
+                      url=$1;
+                      tarName=$2
+                      finalName=$3
+                      rm -rf $BASE_DIR/$finalName
+                      wget -nv -O $DOWNLOAD_DIR/$tarName $url
+                      tar -zxf $DOWNLOAD_DIR/$tarName -C $BASE_DIR
+                      rm $DOWNLOAD_DIR/$tarName
+                      mv $BASE_DIR/${finalName}* $BASE_DIR/$finalName
+                    }
+                    rm -rf $DOWNLOAD_DIR
+                    mkdir -p $DOWNLOAD_DIR
+                    download "http://archive.cloudera.com/cdh4/cdh/4/hadoop-latest.tar.gz" hadoop.tar.gz hadoop
+                    download "http://archive.cloudera.com/cdh4/cdh/4/hive-latest.tar.gz" hive.tar.gz hive
+                  </echo>
+                  <exec executable="sh" dir="${basedir}" failonerror="true">
+                    <arg line="target/download.sh"/>
+                  </exec>
+                </target>
+              </configuration>
+            </execution>
+          </executions>
+        </plugin>
+      </plugins>
+     </build>
+   </profile>
+   <profile>
+     <id>link-hadoop</id>
+     <build>
+      <plugins>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-antrun-plugin</artifactId>
+          <configuration>
+            <skipTests>false</skipTests>
+          </configuration>
+          <executions>
+            <execution>
+              <id>link-hadoop</id>
+              <phase>generate-sources</phase>
+              <goals>
+                <goal>run</goal>
+              </goals>
+              <configuration>
+                <target>
+                  <echo file="target/link_dist.sh">
+                    set -e
+                    set -x
+                    /bin/pwd
+                    BASE_DIR=./target
+                    rm -f $BASE_DIR/hadoop
+                    ln -s $1/hadoop $BASE_DIR/.
+                    rm -f $BASE_DIR/hive
+                    ln -s $2/hive $BASE_DIR/.
+                  </echo>
+                  <exec executable="sh" dir="${basedir}" failonerror="true">
+                    <arg line="target/link_dist.sh ${hadoop-dist} ${hive-dist}"/>
+                  </exec>
+                </target>
+              </configuration>
+            </execution>
+          </executions>
+        </plugin>
+      </plugins>
+     </build>
+   </profile>
+  </profiles>
+</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/aef404c6/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithHiveServer.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithHiveServer.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithHiveServer.java
new file mode 100644
index 0000000..25c64c7
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithHiveServer.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.tests.e2e.hive;
+
+import java.io.File;
+import java.util.Map;
+
+import junit.framework.Assert;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.sentry.tests.e2e.hive.hiveserver.HiveServer;
+import org.apache.sentry.tests.e2e.hive.hiveserver.HiveServerFactory;
+import org.junit.After;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.io.Files;
+
+public abstract class AbstractTestWithHiveServer {
+  private static final Logger LOGGER = LoggerFactory
+      .getLogger(AbstractTestWithHiveServer.class);
+  protected File baseDir;
+  protected File logDir;
+  protected File confDir;
+  protected File dataDir;
+  protected File policyFile;
+  protected HiveServer hiveServer;
+  protected FileSystem fileSystem;
+
+  public Context createContext(Map<String, String> properties)
+      throws Exception {
+    fileSystem = FileSystem.get(new Configuration());
+    baseDir = Files.createTempDir();
+    LOGGER.info("BaseDir = " + baseDir);
+    logDir = assertCreateDir(new File(baseDir, "log"));
+    confDir = assertCreateDir(new File(baseDir, "etc"));
+    dataDir = assertCreateDir(new File(baseDir, "data"));
+    policyFile = new File(confDir, HiveServerFactory.AUTHZ_PROVIDER_FILENAME);
+    hiveServer = HiveServerFactory.create(properties, baseDir, confDir, logDir, policyFile, fileSystem);
+    hiveServer.start();
+    return new Context(hiveServer, getFileSystem(),
+        baseDir, confDir, dataDir, policyFile);
+  }
+
+  protected static File assertCreateDir(File dir) {
+    if(!dir.isDirectory()) {
+      Assert.assertTrue("Failed creating " + dir, dir.mkdirs());
+    }
+    return dir;
+  }
+
+  protected FileSystem getFileSystem() {
+    return fileSystem;
+  }
+
+  @After
+  public void tearDownWithHiveServer() throws Exception {
+    if(hiveServer != null) {
+      hiveServer.shutdown();
+      hiveServer = null;
+    }
+    if(baseDir != null) {
+      if(System.getProperty(HiveServerFactory.KEEP_BASEDIR) == null) {
+        FileUtils.deleteQuietly(baseDir);
+      }
+      baseDir = null;
+    }
+  }
+}
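
As a usage illustration only (this sketch is not part of the commit; the user name, password, and query are placeholders, and it assumes the policy file grants that user access), a test built on the base class above would look like:

    package org.apache.sentry.tests.e2e.hive;

    import java.sql.Connection;
    import java.sql.Statement;
    import java.util.Map;

    import org.junit.Test;

    import com.google.common.collect.Maps;

    public class TestHiveServerSmoke extends AbstractTestWithHiveServer {
      @Test
      public void testConnect() throws Exception {
        Map<String, String> properties = Maps.newHashMap();
        // createContext() starts a HiveServer under a fresh temp dir
        Context context = createContext(properties);
        Connection connection = context.createConnection("admin1", "password");
        Statement statement = context.createStatement(connection);
        statement.execute("SHOW TABLES");
        statement.close();
        connection.close();
        // tearDownWithHiveServer() in the base class shuts the server down
      }
    }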

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/aef404c6/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java
new file mode 100644
index 0000000..e56eb92
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java
@@ -0,0 +1,157 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.tests.e2e.hive;
+
+import java.io.File;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.Statement;
+import java.util.Map;
+
+import junit.framework.Assert;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.sentry.tests.e2e.hive.hiveserver.HiveServer;
+import org.apache.sentry.tests.e2e.hive.hiveserver.HiveServerFactory;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Maps;
+import com.google.common.io.Files;
+
+public abstract class AbstractTestWithStaticConfiguration {
+  private static final Logger LOGGER = LoggerFactory
+      .getLogger(AbstractTestWithStaticConfiguration.class);
+  protected static final String SINGLE_TYPE_DATA_FILE_NAME = "kv1.dat";
+  protected static final String ADMIN1 = "admin1";
+  protected static final String ALL_DB1 = "server=server1->db=db_1",
+      ALL_DB2 = "server=server1->db=db_2",
+      SELECT_DB1_TBL1 = "server=server1->db=db_1->table=tb_1->action=select",
+      SELECT_DB1_TBL2 = "server=server1->db=db_1->table=tb_2->action=select",
+      SELECT_DB1_NONTABLE = "server=server1->db=db_1->table=this table does not exist->action=select",
+      INSERT_DB1_TBL1 = "server=server1->db=db_1->table=tb_1->action=insert",
+      INSERT_DB1_TBL2 = "server=server1->db=db_1->table=tb_2->action=insert",
+      SELECT_DB2_TBL2 = "server=server1->db=db_2->table=tb_2->action=select",
+      INSERT_DB2_TBL1 = "server=server1->db=db_2->table=tb_1->action=insert",
+      SELECT_DB1_VIEW1 = "server=server1->db=db_1->table=view_1->action=select",
+      USER1 = "user1",
+      USER2 = "user2",
+      GROUP1 = "group1",
+      GROUP1_ROLE = "group1_role",
+      DB1 = "db_1",
+      DB2 = "db_2",
+      DB3 = "db_3",
+      TBL1 = "tb_1",
+      TBL2 = "tb_2",
+      TBL3 = "tb_3",
+      VIEW1 = "view_1",
+      VIEW2 = "view_2",
+      VIEW3 = "view_3",
+      INDEX1 = "index_1",
+      INDEX2 = "index_2";
+
+
+  protected static File baseDir;
+  protected static File logDir;
+  protected static File confDir;
+  protected static File dataDir;
+  protected static File policyFile;
+  protected static HiveServer hiveServer;
+  protected static FileSystem fileSystem;
+  protected static Map<String, String> properties;
+  protected Context context;
+
+  public Context createContext() throws Exception {
+    return new Context(hiveServer, fileSystem,
+        baseDir, confDir, dataDir, policyFile);
+  }
+  protected void dropDb(String user, String...dbs) throws Exception {
+    Connection connection = context.createConnection(user, "password");
+    Statement statement = connection.createStatement();
+    for(String db : dbs) {
+      statement.execute("DROP DATABASE IF EXISTS " + db + " CASCADE");
+    }
+    statement.close();
+    connection.close();
+  }
+  protected void createDb(String user, String...dbs) throws Exception {
+    Connection connection = context.createConnection(user, "password");
+    Statement statement = connection.createStatement();
+    for(String db : dbs) {
+      statement.execute("CREATE DATABASE " + db);
+    }
+    statement.close();
+    connection.close();
+  }
+  protected void createTable(String user, String db, File dataFile, String...tables)
+      throws Exception {
+    Connection connection = context.createConnection(user, "password");
+    Statement statement = connection.createStatement();
+    statement.execute("USE " + db);
+    for(String table : tables) {
+      statement.execute("DROP TABLE IF EXISTS " + table);
+      statement.execute("create table " + table
+          + " (under_col int comment 'the under column', value string)");
+      statement.execute("load data local inpath '" + dataFile.getPath()
+          + "' into table " + table);
+      ResultSet res = statement.executeQuery("select * from " + table);
+      Assert.assertTrue("Table should have data after load", res.next());
+      res.close();
+    }
+    statement.close();
+    connection.close();
+  }
+
+  protected static File assertCreateDir(File dir) {
+    if(!dir.isDirectory()) {
+      Assert.assertTrue("Failed creating " + dir, dir.mkdirs());
+    }
+    return dir;
+  }
+
+  protected FileSystem getFileSystem() {
+    return fileSystem;
+  }
+  @BeforeClass
+  public static void setupTestStaticConfiguration()
+      throws Exception {
+    properties = Maps.newHashMap();
+    baseDir = Files.createTempDir();
+    LOGGER.info("BaseDir = " + baseDir);
+    logDir = assertCreateDir(new File(baseDir, "log"));
+    confDir = assertCreateDir(new File(baseDir, "etc"));
+    dataDir = assertCreateDir(new File(baseDir, "data"));
+    policyFile = new File(confDir, HiveServerFactory.AUTHZ_PROVIDER_FILENAME);
+  }
+
+  @AfterClass
+  public static void tearDownTestStaticConfiguration() throws Exception {
+    if(hiveServer != null) {
+      hiveServer.shutdown();
+      hiveServer = null;
+    }
+    if(baseDir != null) {
+      if(System.getProperty(HiveServerFactory.KEEP_BASEDIR) == null) {
+        FileUtils.deleteQuietly(baseDir);
+      }
+      baseDir = null;
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/aef404c6/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticDFS.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticDFS.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticDFS.java
new file mode 100644
index 0000000..f670f89
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticDFS.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.tests.e2e.hive;
+
+import java.io.File;
+import java.io.IOException;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.sentry.tests.e2e.hive.hiveserver.HiveServerFactory;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+
+public abstract class AbstractTestWithStaticDFS extends AbstractTestWithStaticConfiguration {
+
+  protected static MiniDFSCluster dfsCluster;
+  protected static Path dfsBaseDir;
+
+  @Before
+  public void setupTestWithDFS() throws IOException {
+    Assert.assertTrue(dfsBaseDir.toString(), fileSystem.delete(dfsBaseDir, true));
+    Assert.assertTrue(dfsBaseDir.toString(), fileSystem.mkdirs(dfsBaseDir));
+  }
+
+  protected static Path assertCreateDfsDir(Path dir) throws IOException {
+    if(!fileSystem.isDirectory(dir)) {
+      Assert.assertTrue("Failed creating " + dir, fileSystem.mkdirs(dir));
+    }
+    return dir;
+  }
+  @BeforeClass
+  public static void setupTestWithStaticDFS()
+      throws Exception {
+    Configuration conf = new Configuration();
+    File dfsDir = assertCreateDir(new File(baseDir, "dfs"));
+    conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, dfsDir.getPath());
+    dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
+    fileSystem = dfsCluster.getFileSystem();
+    dfsBaseDir = assertCreateDfsDir(new Path(new Path(fileSystem.getUri()), "/base"));
+    hiveServer = HiveServerFactory.create(properties, baseDir, confDir, logDir, policyFile, fileSystem);
+    hiveServer.start();
+  }
+
+  @AfterClass
+  public static void tearDownTestWithStaticDFS() throws Exception {
+    if(dfsCluster != null) {
+      dfsCluster.shutdown();
+      dfsCluster = null;
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/aef404c6/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticLocalFS.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticLocalFS.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticLocalFS.java
new file mode 100644
index 0000000..3954b9a
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticLocalFS.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.tests.e2e.hive;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.sentry.tests.e2e.hive.hiveserver.HiveServerFactory;
+import org.junit.BeforeClass;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public abstract class AbstractTestWithStaticLocalFS extends AbstractTestWithStaticConfiguration {
+  @SuppressWarnings("unused")
+  private static final Logger LOGGER = LoggerFactory
+      .getLogger(AbstractTestWithStaticLocalFS.class);
+  @BeforeClass
+  public static void setupTestWithStaticHiveServer()
+      throws Exception {
+    fileSystem = FileSystem.get(new Configuration());
+    hiveServer = HiveServerFactory.create(properties, baseDir, confDir, logDir, policyFile, fileSystem);
+    hiveServer.start();
+  }
+}
\ No newline at end of file
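
A hypothetical concrete test on top of this hierarchy (again illustrative, not from the commit; it assumes the policy file authorizes admin1) inherits the helpers from AbstractTestWithStaticConfiguration:

    package org.apache.sentry.tests.e2e.hive;

    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;

    public class TestStaticSmoke extends AbstractTestWithStaticLocalFS {
      @Before
      public void setup() throws Exception {
        context = createContext();  // reuses the server started in @BeforeClass
      }

      @Test
      public void testDatabaseLifecycle() throws Exception {
        createDb(ADMIN1, DB1);  // CREATE DATABASE db_1
        dropDb(ADMIN1, DB1);    // DROP DATABASE IF EXISTS db_1 CASCADE
      }

      @After
      public void teardown() {
        if (context != null) {
          context.close();
        }
      }
    }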

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/aef404c6/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/Context.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/Context.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/Context.java
new file mode 100644
index 0000000..f86ae6d
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/Context.java
@@ -0,0 +1,210 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.tests.e2e.hive;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.net.URI;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Set;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.sentry.tests.e2e.hive.hiveserver.HiveServer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Charsets;
+import com.google.common.collect.Sets;
+import com.google.common.io.Files;
+
+public class Context {
+
+  private static final Logger LOGGER = LoggerFactory
+      .getLogger(Context.class);
+
+  public static final String AUTHZ_EXCEPTION_SQL_STATE = "42000";
+  public static final String AUTHZ_EXEC_HOOK_EXCEPTION_SQL_STATE = "08S01";
+  public static final String AUTHZ_EXCEPTION_ERROR_MSG = "No valid privileges";
+
+  private final HiveServer hiveServer;
+  private final FileSystem fileSystem;
+  private final File baseDir;
+  private final File dataDir;
+
+  private final File policyFile;
+  private final Set<Connection> connections;
+  private final Set<Statement> statements;
+
+
+  public Context(HiveServer hiveServer, FileSystem fileSystem,
+      File baseDir, File confDir, File dataDir, File policyFile) throws Exception {
+    this.hiveServer = hiveServer;
+    this.fileSystem = fileSystem;
+    this.baseDir = baseDir;
+    this.dataDir = dataDir;
+    this.policyFile = policyFile;
+    connections = Sets.newHashSet();
+    statements = Sets.newHashSet();
+  }
+
+  public Connection createConnection(String username, String password) throws Exception {
+    Connection connection =  hiveServer.createConnection(username, password);
+    connections.add(connection);
+    assertNotNull("Connection is null", connection);
+    assertFalse("Connection should not be closed", connection.isClosed());
+    Statement statement  = connection.createStatement();
+    statement.close();
+    return connection;
+  }
+
+  public Statement createStatement(Connection connection)
+  throws Exception {
+    Statement statement  = connection.createStatement();
+    assertNotNull("Statement is null", statement);
+    statements.add(statement);
+    return statement;
+  }
+  /**
+   * @deprecated use append()
+   */
+  public void writePolicyFile(String buf) throws IOException {
+    FileOutputStream out = new FileOutputStream(policyFile);
+    out.write(buf.getBytes(Charsets.UTF_8));
+    out.close();
+  }
+  /**
+   * @deprecated use append()
+   */
+  @Deprecated
+  public void appendToPolicyFileWithNewLine(String line) throws IOException {
+    append(line);
+  }
+  public void append(String...lines) throws IOException {
+    StringBuffer buffer = new StringBuffer();
+    for(String line : lines) {
+      buffer.append(line).append("\n");
+    }
+    Files.append(buffer, policyFile, Charsets.UTF_8);
+  }
+
+  public boolean deletePolicyFile() throws IOException {
+     return policyFile.delete();
+  }
+  /**
+   * @deprecated use append()
+   */
+  public void makeNewPolicy(String policyLines[]) throws FileNotFoundException {
+    PrintWriter policyWriter = new PrintWriter (policyFile.toString());
+    for (String line : policyLines) {
+      policyWriter.println(line);
+    }
+    policyWriter.close();
+    assertFalse(policyWriter.checkError());
+  }
+
+  public void close() {
+    for(Statement statement : statements) {
+      try {
+        statement.close();
+      } catch (SQLException exception) {
+        LOGGER.warn("Error closing " + statement, exception);
+      }
+    }
+    statements.clear();
+
+    for(Connection connection : connections) {
+      try {
+        connection.close();
+      } catch (SQLException exception) {
+        LOGGER.warn("Error closing " + connection, exception);
+      }
+    }
+    connections.clear();
+  }
+
+  public void assertAuthzException(Statement statement, String query)
+      throws SQLException {
+    try {
+      statement.execute(query);
+      Assert.fail("Expected SQLException for '" + query + "'");
+    } catch (SQLException e) {
+      verifyAuthzException(e);
+    }
+  }
+
+  public void assertAuthzExecHookException(Statement statement, String query)
+      throws SQLException {
+    try {
+      statement.execute(query);
+      Assert.fail("Expected SQLException for '" + query + "'");
+    } catch (SQLException e) {
+      verifyAuthzExecHookException(e);
+    }
+  }
+
+
+  // verify that the sqlexception is due to authorization failure
+  public void verifyAuthzException(SQLException sqlException) throws SQLException{
+    verifyAuthzExceptionForState(sqlException, AUTHZ_EXCEPTION_SQL_STATE);
+  }
+
+  // verify that the sqlexception is due to authorization failure due to exec hooks
+  public void verifyAuthzExecHookException(SQLException sqlException) throws SQLException{
+    verifyAuthzExceptionForState(sqlException, AUTHZ_EXEC_HOOK_EXCEPTION_SQL_STATE);
+  }
+
+  // verify that the sqlexception is due to authorization failure
+  private void verifyAuthzExceptionForState(SQLException sqlException,
+        String expectedSqlState) throws SQLException {
+    if (!expectedSqlState.equals(sqlException.getSQLState())) {
+      throw sqlException;
+    }
+  }
+
+  public File getBaseDir() {
+    return baseDir;
+  }
+
+  public File getDataDir() {
+    return dataDir;
+  }
+
+  public File getPolicyFile() {
+    return policyFile;
+  }
+
+  @SuppressWarnings("static-access")
+  public URI getDFSUri() throws IOException {
+    return fileSystem.getDefaultUri(fileSystem.getConf());
+  }
+
+  public String getProperty(String propName) {
+    return hiveServer.getProperty(propName);
+  }
+}
\ No newline at end of file
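
Context is the hub the tests share: it edits the policy file and asserts on authorization failures. A hypothetical fragment (rule and group names are placeholders; the privilege string and the [groups]/[roles] sections follow the file-provider format already used by the constants above):

    // Inside a test that already holds a Context (context) and a Statement
    // (statement) for a user in group1 -- all names are placeholders.
    context.append(
        "[groups]",
        "group1 = select_tb1",
        "[roles]",
        "select_tb1 = server=server1->db=db_1->table=tb_1->action=select");
    // Anything outside the granted privilege must fail with SQLState 42000:
    context.assertAuthzException(statement, "SELECT * FROM db_2.tb_2");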

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/aef404c6/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/DummySentryOnFailureHook.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/DummySentryOnFailureHook.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/DummySentryOnFailureHook.java
new file mode 100644
index 0000000..e4055a7
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/DummySentryOnFailureHook.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.tests.e2e.hive;
+
+import org.apache.sentry.binding.hive.SentryOnFailureHook;
+import org.apache.sentry.binding.hive.SentryOnFailureHookContext;
+
+public class DummySentryOnFailureHook implements SentryOnFailureHook {
+
+  static boolean invoked = false;
+
+  @Override
+  public void run(SentryOnFailureHookContext failureHookContext)
+      throws Exception {
+    invoked = true;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/aef404c6/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/PolicyFileEditor.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/PolicyFileEditor.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/PolicyFileEditor.java
new file mode 100644
index 0000000..1c9511d
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/PolicyFileEditor.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.tests.e2e.hive;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import com.google.common.base.Charsets;
+import com.google.common.base.Joiner;
+import com.google.common.collect.Lists;
+import com.google.common.io.Files;
+
+/**
+ * @deprecated use Context.append() instead
+ */
+public class PolicyFileEditor {
+
+  private static final String NL = System.getProperty("line.separator", "\n");
+
+  private File policy;
+
+  public PolicyFileEditor(File policy) throws IOException {
+    policy.delete();
+    policy.createNewFile();
+    this.policy = policy;
+  }
+
+  public void clearOldPolicy() throws IOException {
+    policy.delete();
+    policy.createNewFile();
+  }
+
+  public void addPolicy(String line, String cat) throws IOException {
+    List<String> result = new ArrayList<String>();
+    boolean exist = false;
+    for(String s : Files.readLines(policy, Charsets.UTF_8)) {
+      result.add(s);
+      if (s.equals("[" + cat + "]")) {
+        result.add(line);
+        exist = true;
+      }
+    }
+    if (!exist) {
+      result.add("[" + cat + "]");
+      result.add(line);
+    }
+    Files.write(Joiner.on(NL).join(result), policy, Charsets.UTF_8);
+  }
+
+  public void removePolicy(String line) throws IOException {
+    List<String> result = Lists.newArrayList();
+    for(String s : Files.readLines(policy, Charsets.UTF_8)) {
+      if (!s.equals(line)) {
+        result.add(s);
+      }
+    }
+    Files.write(Joiner.on(NL).join(result), policy, Charsets.UTF_8);
+  }
+}
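
To make the editor's behavior concrete: addPolicy(line, cat) inserts the line
immediately under the "[cat]" section header, appending a new section at the
end of the file when that header does not exist yet, and removePolicy(line)
drops every exactly-matching line. A short sketch (section, group, and role
names mirror the tests below):

    PolicyFileEditor editor = new PolicyFileEditor(context.getPolicyFile());
    editor.addPolicy("group1 = select_tab1", "groups");
    editor.addPolicy(
        "select_tab1 = server=server1->db=db1->table=tab1->action=select",
        "roles");
    // The policy file now contains:
    //   [groups]
    //   group1 = select_tab1
    //   [roles]
    //   select_tab1 = server=server1->db=db1->table=tab1->action=select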

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/aef404c6/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestCrossDbOps.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestCrossDbOps.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestCrossDbOps.java
new file mode 100644
index 0000000..45854e9
--- /dev/null
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestCrossDbOps.java
@@ -0,0 +1,691 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.tests.e2e.hive;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.List;
+
+import junit.framework.Assert;
+
+import org.apache.sentry.provider.file.PolicyFile;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.common.io.Resources;
+
+/* Tests privileges at table scope with cross-database access */
+
+public class TestCrossDbOps extends AbstractTestWithStaticLocalFS {
+  private File dataFile;
+  private PolicyFile policyFile;
+  private String loadData;
+
+  @Before
+  public void setup() throws Exception {
+    context = createContext();
+    File dataDir = context.getDataDir();
+    // copy data file to test dir
+    dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME);
+    FileOutputStream to = new FileOutputStream(dataFile);
+    Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to);
+    to.close();
+    policyFile = PolicyFile.createAdminOnServer1(ADMIN1);
+    loadData = "server=server1->uri=file://" + dataFile.getPath();
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    if (context != null) {
+      context.close();
+    }
+  }
+
+  /*
+   * Admin creates databases DB1 and DB2, table tab1 in DB1, and tables tab2
+   * and tab3 in DB2. User user1 has select on db1.tab1 and insert on
+   * db2.tab2; user user2 has select on db2.tab3. Test SHOW DATABASES and
+   * SHOW TABLES for both user1 and user2.
+   */
+  @Test
+  public void testShowDatabasesAndShowTables() throws Exception {
+    // edit policy file
+    policyFile
+        .addRolesToGroup("group1", "select_tab1", "insert_tab2")
+        .addRolesToGroup("group2", "select_tab3")
+        .addPermissionsToRole("select_tab1",  "server=server1->db=db1->table=tab1->action=select")
+        .addPermissionsToRole("select_tab3", "server=server1->db=db2->table=tab3->action=select")
+        .addPermissionsToRole("insert_tab2", "server=server1->db=db2->table=tab2->action=insert")
+        .addGroupsToUser("user1", "group1")
+        .addGroupsToUser("user2", "group2");
+    policyFile.write(context.getPolicyFile());
+
+    // admin creates two databases
+    Connection connection = context.createConnection(ADMIN1, "foo");
+    Statement statement = context.createStatement(connection);
+    statement.execute("DROP DATABASE IF EXISTS DB_1 CASCADE");
+    statement.execute("DROP DATABASE IF EXISTS DB_2 CASCADE");
+    statement.execute("DROP DATABASE IF EXISTS DB1 CASCADE");
+    statement.execute("DROP DATABASE IF EXISTS DB2 CASCADE");
+
+    statement.execute("CREATE DATABASE DB1");
+    statement.execute("CREATE DATABASE DB2");
+    statement.execute("USE DB1");
+    statement.execute("CREATE TABLE TAB1(id int)");
+    statement.executeQuery("SHOW TABLES");
+    statement.execute("USE DB2");
+    statement.execute("CREATE TABLE TAB2(id int)");
+    statement.execute("CREATE TABLE TAB3(id int)");
+
+    // test show databases
+    // show databases shouldn't filter any of the dbs from the resultset
+    Connection conn = context.createConnection("user1", "");
+    Statement stmt = context.createStatement(conn);
+    ResultSet res = stmt.executeQuery("SHOW DATABASES");
+    List<String> result = new ArrayList<String>();
+    result.add("db1");
+    result.add("db2");
+    result.add("default");
+
+    while (res.next()) {
+      String dbName = res.getString(1);
+      assertTrue(dbName, result.remove(dbName));
+    }
+    assertTrue(result.toString(), result.isEmpty());
+    res.close();
+
+    // test show tables
+    stmt.execute("USE DB1");
+    res = stmt.executeQuery("SHOW TABLES");
+    result.clear();
+    result.add("tab1");
+
+    while (res.next()) {
+      String tableName = res.getString(1);
+      assertTrue(tableName, result.remove(tableName));
+    }
+    assertTrue(result.toString(), result.isEmpty());
+    res.close();
+
+    stmt.execute("USE DB2");
+    res = stmt.executeQuery("SHOW TABLES");
+    result.clear();
+    result.add("tab2");
+
+    while (res.next()) {
+      String tableName = res.getString(1);
+      assertTrue(tableName, result.remove(tableName));
+    }
+    assertTrue(result.toString(), result.isEmpty());
+    res.close();
+
+    stmt.close();
+    conn.close();
+
+    // test show databases and show tables for user2
+    conn = context.createConnection("user2", "");
+    stmt = context.createStatement(conn);
+    res = stmt.executeQuery("SHOW DATABASES");
+    result.clear();
+    result.add("db2");
+    result.add("default");
+
+    while (res.next()) {
+      String dbName = res.getString(1);
+      assertTrue(dbName, result.remove(dbName));
+    }
+    assertTrue(result.toString(), result.isEmpty());
+    res.close();
+
+    // test show tables
+    stmt.execute("USE DB2");
+    res = stmt.executeQuery("SHOW TABLES");
+    result.clear();
+    result.add("tab3");
+
+    while (res.next()) {
+      String tableName = res.getString(1);
+      assertTrue(tableName, result.remove(tableName));
+    }
+    assertTrue(result.toString(), result.isEmpty());
+    res.close();
+
+    try {
+      stmt.execute("USE DB1");
+      Assert.fail("Expected SQL exception");
+    } catch (SQLException e) {
+      context.verifyAuthzException(e);
+    }
+    context.close();
+  }
+
+  /*
+   * Same setup as above: admin creates DB1 and DB2, table tab1 in DB1, and
+   * tables tab2 and tab3 in DB2; user1 has select on db1.tab1 and insert on
+   * db2.tab2, user2 has select on db2.tab3. Test the JDBC metadata calls
+   * getSchemas, getTables and getColumns for both user1 and user2.
+   */
+  @Test
+  public void testJDBCGetSchemasAndGetTables() throws Exception {
+    // edit policy file
+    policyFile.addRolesToGroup("group1", "select_tab1", "insert_tab2")
+        .addRolesToGroup("group2", "select_tab3")
+        .addPermissionsToRole("select_tab1", "server=server1->db=db1->table=tab1->action=select")
+        .addPermissionsToRole("select_tab3", "server=server1->db=db2->table=tab3->action=select")
+        .addPermissionsToRole("insert_tab2", "server=server1->db=db2->table=tab2->action=insert")
+        .addGroupsToUser("user1", "group1")
+        .addGroupsToUser("user2", "group2");
+    policyFile.write(context.getPolicyFile());
+
+    // admin creates two databases
+    Connection connection = context.createConnection(ADMIN1, "foo");
+    Statement statement = context.createStatement(connection);
+    statement.execute("DROP DATABASE IF EXISTS DB_1 CASCADE");
+    statement.execute("DROP DATABASE IF EXISTS DB_2 CASCADE");
+    statement.execute("DROP DATABASE IF EXISTS DB1 CASCADE");
+    statement.execute("DROP DATABASE IF EXISTS DB2 CASCADE");
+
+    statement.execute("CREATE DATABASE DB1");
+    statement.execute("CREATE DATABASE DB2");
+    statement.execute("USE DB1");
+    statement.execute("CREATE TABLE TAB1(id int)");
+    statement.executeQuery("SHOW TABLES");
+    statement.execute("USE DB2");
+    statement.execute("CREATE TABLE TAB2(id int)");
+    statement.execute("CREATE TABLE TAB3(id int)");
+
+    // test show databases
+    // show databases shouldn't filter any of the dbs from the resultset
+    Connection conn = context.createConnection("user1", "");
+    List<String> result = new ArrayList<String>();
+
+    // test direct JDBC metadata API
+    ResultSet res = conn.getMetaData().getSchemas();
+    ResultSetMetaData resMeta = res.getMetaData();
+    assertEquals(2, resMeta.getColumnCount());
+    assertEquals("TABLE_SCHEM", resMeta.getColumnName(1));
+    assertEquals("TABLE_CATALOG", resMeta.getColumnName(2));
+
+    result.add("db1");
+    result.add("db2");
+    result.add("default");
+
+    while (res.next()) {
+      String dbName = res.getString(1);
+      assertTrue(dbName, result.remove(dbName));
+    }
+    assertTrue(result.toString(), result.isEmpty());
+    res.close();
+
+    // test direct JDBC metadata API
+    res = conn.getMetaData().getTables(null, "DB1", "tab%", null);
+    result.add("tab1");
+
+    while (res.next()) {
+      String tableName = res.getString(3);
+      assertTrue(tableName, result.remove(tableName));
+    }
+    assertTrue(result.toString(), result.isEmpty());
+    res.close();
+
+    // test direct JDBC metadata API
+    res = conn.getMetaData().getTables(null, "DB2", "tab%", null);
+    result.add("tab2");
+
+    while (res.next()) {
+      String tableName = res.getString(3);
+      assertTrue(tableName, result.remove(tableName));
+    }
+    assertTrue(result.toString(), result.isEmpty());
+    res.close();
+
+    res = conn.getMetaData().getTables(null, "DB%", "tab%", null);
+    result.add("tab2");
+    result.add("tab1");
+
+    while (res.next()) {
+      String tableName = res.getString(3);
+      assertTrue(tableName, result.remove(tableName));
+    }
+    assertTrue(result.toString(), result.isEmpty());
+    res.close();
+
+    // test show columns
+    res = conn.getMetaData().getColumns(null, "DB%", "tab%", "i%");
+    result.add("id");
+    result.add("id");
+
+    while (res.next()) {
+      String columnName = res.getString(4);
+      assertTrue(columnName, result.remove(columnName));
+    }
+    assertTrue(result.toString(), result.isEmpty());
+    res.close();
+
+    conn.close();
+
+    // test show databases and show tables for user2
+    conn = context.createConnection("user2", "");
+
+    // test direct JDBC metadata API
+    res = conn.getMetaData().getSchemas();
+    resMeta = res.getMetaData();
+    assertEquals(2, resMeta.getColumnCount());
+    assertEquals("TABLE_SCHEM", resMeta.getColumnName(1));
+    assertEquals("TABLE_CATALOG", resMeta.getColumnName(2));
+
+    result.add("db2");
+    result.add("default");
+
+    while (res.next()) {
+      String dbName = res.getString(1);
+      assertTrue(dbName, result.remove(dbName));
+    }
+    assertTrue(result.toString(), result.isEmpty());
+    res.close();
+
+    // test JDBC direct API
+    res = conn.getMetaData().getTables(null, "DB%", "tab%", null);
+    result.add("tab3");
+
+    while (res.next()) {
+      String tableName = res.getString(3);
+      assertTrue(tableName, result.remove(tableName));
+    }
+    assertTrue(result.toString(), result.isEmpty());
+    res.close();
+
+    // test show columns
+    res = conn.getMetaData().getColumns(null, "DB%", "tab%", "i%");
+    result.add("id");
+
+    while (res.next()) {
+      String columnName = res.getString(4);
+      assertTrue(columnName, result.remove(columnName));
+    }
+    assertTrue(result.toString(), result.isEmpty());
+    res.close();
+
+    // test show columns; user2 has no privileges on db1, so no columns
+    // should be returned
+    res = conn.getMetaData().getColumns(null, "DB1", "tab%", "i%");
+
+    while (res.next()) {
+      String columnName = res.getString(4);
+      assertTrue(columnName, result.remove(columnName));
+    }
+    assertTrue(result.toString(), result.isEmpty());
+    res.close();
+
+    context.close();
+  }
+
+  /**
+   * Test Case 2.8: admin creates two databases, DB_1 and DB_2; admin grants
+   * ALL on DB_1 to USER1 and USER2, and ALL on DB_2 to user1's and user2's
+   * groups.
+   * Positive test case: user1 and user2 have the ALL privilege on both DB_1
+   * and DB_2.
+   * Negative test case: user1 and user2 don't have the ALL privilege on the
+   * SERVER.
+   */
+  @Test
+  public void testDbPrivileges() throws Exception {
+    // edit policy file
+    policyFile.addRolesToGroup("user_group", "db1_all,db2_all, load_data")
+        .addPermissionsToRole("db1_all", "server=server1->db=" + DB1)
+        .addPermissionsToRole("db2_all", "server=server1->db=" + DB2)
+        .addPermissionsToRole("load_data", "server=server1->URI=file://" + dataFile.getPath())
+        .addGroupsToUser("user1", "user_group")
+        .addGroupsToUser("user2", "user_group");
+    policyFile.write(context.getPolicyFile());
+
+    dropDb(ADMIN1, DB1, DB2);
+    createDb(ADMIN1, DB1, DB2);
+    for (String user : new String[]{USER1, USER2}) {
+      for (String dbName : new String[]{DB1, DB2}) {
+        Connection userConn = context.createConnection(user, "foo");
+        String tabName = user + "_tab1";
+        Statement userStmt = context.createStatement(userConn);
+        // Positive case: user1 and user2 have permission to access both
+        // db1 and db2
+        userStmt.execute("create table " + dbName + "." + tabName + " (id int)");
+        userStmt.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath()
+            + "' INTO TABLE " + dbName + "." + tabName);
+        userStmt.execute("select * from " + dbName + "." + tabName);
+        context.close();
+      }
+    }
+  }
+
+  /**
+   * Test Case 2.11: admin creates a new database DB_1 and grants ALL on DB_1
+   * to himself; the operations should work.
+   */
+  @Test
+  public void testAdminDbPrivileges() throws Exception {
+    policyFile.write(context.getPolicyFile());
+    dropDb(ADMIN1, DB1);
+    createDb(ADMIN1, DB1);
+    Connection adminCon = context.createConnection(ADMIN1, "password");
+    Statement adminStmt = context.createStatement(adminCon);
+    String tabName = DB1 + "." + "admin_tab1";
+    adminStmt.execute("create table " + tabName + "(c1 string)");
+    adminStmt.execute("load data local inpath '" + dataFile.getPath() + "' into table "
+        + tabName);
+    assertTrue(adminStmt.executeQuery("select * from " + tabName).next());
+    adminStmt.close();
+    adminCon.close();
+  }
+
+  /**
+   * Test Case 2.14: admin creates a new database DB_1 and table TABLE_1 in
+   * DB_1, then grants INSERT on TABLE_1 to user1's group.
+   * Negative test case: the following operations on TABLE_1 should fail for
+   * user1: explain, analyze, describe, describe function, show columns, show
+   * table status, show table properties, show create table, show partitions,
+   * show indexes, and select * from TABLE_1.
+   */
+  @Test
+  public void testNegativeUserPrivileges() throws Exception {
+    // edit policy file
+    policyFile.addRolesToGroup("user_group", "db1_tab1_insert", "db1_tab2_all")
+        .addPermissionsToRole("db1_tab2_all", "server=server1->db=db1->table=table_2")
+        .addPermissionsToRole("db1_tab1_insert", "server=server1->db=db1->table=table_1->action=insert")
+        .addGroupsToUser("user3", "user_group");
+    policyFile.write(context.getPolicyFile());
+
+    Connection adminCon = context.createConnection(ADMIN1, "foo");
+    Statement adminStmt = context.createStatement(adminCon);
+    String dbName = "db1";
+    adminStmt.execute("use default");
+    adminStmt.execute("DROP DATABASE IF EXISTS " + dbName + " CASCADE");
+    adminStmt.execute("CREATE DATABASE " + dbName);
+    adminStmt.execute("create table " + dbName + ".table_1 (id int)");
+    adminStmt.close();
+    adminCon.close();
+    Connection userConn = context.createConnection("user3", "foo");
+    Statement userStmt = context.createStatement(userConn);
+    context.assertAuthzException(userStmt, "select * from " + dbName + ".table_1");
+    userConn.close();
+    userStmt.close();
+  }
+
+  /**
+   * Test Case 2.16: admin creates a new database DB_1 with TABLE_1 and
+   * TABLE_2 (same schema) in DB_1, then grants SELECT and INSERT on TABLE_2
+   * to user1's group.
+   * Negative test case: "insert overwrite TABLE_2 select * from TABLE_1"
+   * should fail for user1.
+   */
+  @Test
+  public void testNegativeUserDMLPrivileges() throws Exception {
+    policyFile
+        .addPermissionsToRole("db1_tab2_all", "server=server1->db=db1->table=table_2")
+        .addRolesToGroup("group1", "db1_tab2_all")
+        .addGroupsToUser("user3", "group1");
+    policyFile.write(context.getPolicyFile());
+
+    dropDb(ADMIN1, DB1);
+    createDb(ADMIN1, DB1);
+    Connection adminCon = context.createConnection(ADMIN1, "password");
+    Statement adminStmt = context.createStatement(adminCon);
+    adminStmt.execute("create table " + DB1 + ".table_1 (id int)");
+    adminStmt.execute("create table " + DB1 + ".table_2 (id int)");
+    adminStmt.close();
+    adminCon.close();
+    Connection userConn = context.createConnection("user3", "foo");
+    Statement userStmt = context.createStatement(userConn);
+    context.assertAuthzException(userStmt, "insert overwrite table  " + DB1
+        + ".table_2 select * from " + DB1 + ".table_1");
+    context.assertAuthzException(userStmt, "insert overwrite directory '" + dataDir.getPath()
+        + "' select * from  " + DB1 + ".table_1");
+    userStmt.close();
+    userConn.close();
+  }
+
+  /**
+   * Test Case 2.17 Execution steps
+   * a) Admin user creates a new database DB_1
+   * b) Admin user grants ALL on DB_1 to group GROUP_1
+   * c) User from GROUP_1 creates tables TAB_1 and TAB_2 in DB_1
+   * d) Admin user grants SELECT on TAB_1 to group GROUP_2
+   *
+   * 1) verify users from GROUP_2 have only SELECT privileges on TAB_1. They
+   * shouldn't be able to perform any operation other than those listed as
+   * requiring SELECT in the privilege model.
+   *
+   * 2) verify users from GROUP_2 can't perform queries involving a join
+   * between TAB_1 and TAB_2.
+   *
+   * 3) verify users from GROUP_1 can't perform operations requiring ALL at
+   * SERVER scope (refer to the privilege model).
+   */
+  @Test
+  public void testNegUserPrivilegesAll() throws Exception {
+
+    policyFile
+        .addRolesToGroup("user_group1", "db1_all")
+        .addRolesToGroup("user_group2", "db1_tab1_select")
+        .addPermissionsToRole("db1_all", "server=server1->db=db1")
+        .addPermissionsToRole("db1_tab1_select", "server=server1->db=db1->table=table_1->action=select")
+        .addGroupsToUser("user1", "user_group1")
+        .addGroupsToUser("user2", "user_group2");
+    policyFile.write(context.getPolicyFile());
+
+    // create dbs
+    Connection adminCon = context.createConnection(ADMIN1, "foo");
+    Statement adminStmt = context.createStatement(adminCon);
+    String dbName = "db1";
+    adminStmt.execute("use default");
+    adminStmt.execute("create table table_def (name string)");
+    adminStmt.execute("load data local inpath '" + dataFile.getPath()
+        + "' into table table_def");
+
+    adminStmt.execute("DROP DATABASE IF EXISTS " + dbName + " CASCADE");
+    adminStmt.execute("CREATE DATABASE " + dbName);
+    adminStmt.execute("use " + dbName);
+
+    adminStmt.execute("create table table_1 (name string)");
+    adminStmt.execute("load data local inpath '" + dataFile.getPath()
+        + "' into table table_1");
+    adminStmt.execute("create table table_2 (name string)");
+    adminStmt.execute("load data local inpath '" + dataFile.getPath()
+        + "' into table table_2");
+    adminStmt.execute("create view v1 AS select * from table_1");
+    adminStmt.execute("create table table_part_1 (name string)"
+        + " PARTITIONED BY (year INT)");
+    adminStmt.execute("ALTER TABLE table_part_1 ADD PARTITION (year = 2012)");
+
+    adminStmt.close();
+    adminCon.close();
+
+    Connection userConn = context.createConnection("user2", "foo");
+    Statement userStmt = context.createStatement(userConn);
+
+    context.assertAuthzException(userStmt, "drop database " + dbName);
+
+    // Hive currently doesn't support cross db index DDL
+
+    context.assertAuthzException(userStmt, "CREATE TEMPORARY FUNCTION strip AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf'");
+    context.assertAuthzException(userStmt, "create table  " + dbName
+        + ".c_tab_2 as select * from  " + dbName + ".table_2");
+    context.assertAuthzException(userStmt, "select * from  " + dbName + ".table_2");
+    context.assertAuthzException(userStmt, "ALTER DATABASE " + dbName
+        + " SET DBPROPERTIES ('foo' = 'bar')");
+    context.assertAuthzException(userStmt, "drop table " + dbName + ".table_1");
+    context.assertAuthzException(userStmt, "DROP VIEW IF EXISTS " + dbName + ".v1");
+    context.assertAuthzException(userStmt, "create table " + dbName + ".table_5 (name string)");
+    context.assertAuthzException(userStmt, "ALTER TABLE " + dbName + ".table_1  RENAME TO "
+        + dbName + ".table_99");
+    context.assertAuthzException(userStmt, "insert overwrite table " + dbName
+        + ".table_2 select * from " + dbName + ".table_1");
+    context.assertAuthzException(userStmt, "insert overwrite table " + dbName
+        + ".table_2 select * from " + "table_def");
+    context.assertAuthzException(userStmt, "ALTER TABLE " + dbName
+        + ".table_part_1 ADD IF NOT EXISTS PARTITION (year = 2012)");
+    context.assertAuthzException(userStmt, "ALTER TABLE " + dbName
+        + ".table_part_1 PARTITION (year = 2012) SET LOCATION '/etc'");
+    userStmt.close();
+    userConn.close();
+  }
+
+  /**
+   * Steps: 1. admin creates databases DB_1 and DB_2, with no tables or other
+   * objects in them
+   * 2. admin grants ALL on DB_1 and DB_2 to user1's group
+   * positive test cases:
+   * a) user1 has the privilege to create tables, load data, drop tables,
+   * create views, and insert more data in both databases
+   * b) user1 can switch between DB_1 and DB_2 without exception
+   * negative test case:
+   * c) user1 cannot drop a database
+   */
+  @Test
+  public void testSandboxOpt9() throws Exception {
+    policyFile
+        .addPermissionsToRole(GROUP1_ROLE, ALL_DB1, ALL_DB2, loadData)
+        .addRolesToGroup(GROUP1, GROUP1_ROLE)
+        .addGroupsToUser(USER1, GROUP1);
+    policyFile.write(context.getPolicyFile());
+
+    dropDb(ADMIN1, DB1, DB2);
+    createDb(ADMIN1, DB1, DB2);
+
+    Connection connection = context.createConnection(USER1, "password");
+    Statement statement = context.createStatement(connection);
+
+    // a
+    statement.execute("DROP TABLE IF EXISTS " + DB1 + "." + TBL1);
+    statement.execute("create table " + DB1 + "." + TBL1
+        + " (under_col int comment 'the under column', value string)");
+    statement.execute("load data local inpath '" + dataFile.getPath()
+        + "' into table " + DB1 + "." + TBL1);
+    statement.execute("DROP VIEW IF EXISTS " + DB1 + "." + VIEW1);
+    statement.execute("CREATE VIEW " + DB1 + "." + VIEW1
+        + " (value) AS SELECT value from " + DB1 + "." + TBL1
+        + " LIMIT 10");
+    statement.execute("DROP TABLE IF EXISTS " + DB2 + "." + TBL1);
+    statement.execute("CREATE TABLE " + DB2 + "." + TBL1
+        + " AS SELECT value from " + DB1 + "." + TBL1
+        + " LIMIT 10");
+
+    // b
+    statement.execute("DROP TABLE IF EXISTS " + DB2 + "." + TBL2);
+    statement.execute("create table " + DB2 + "." + TBL2
+        + " (under_col int comment 'the under column', value string)");
+    statement.execute("load data local inpath '" + dataFile.getPath()
+        + "' into table " + DB2 + "." + TBL2);
+    statement.execute("DROP TABLE IF EXISTS " + DB2 + "." + TBL3);
+    statement.execute("create table " + DB2 + "." + TBL3
+        + " (under_col int comment 'the under column', value string)");
+    statement.execute("load data local inpath '" + dataFile.getPath()
+        + "' into table " + DB2 + "." + TBL3);
+
+    // c
+    context.assertAuthzException(statement, "DROP DATABASE IF EXISTS " + DB1);
+    context.assertAuthzException(statement, "DROP DATABASE IF EXISTS " + DB2);
+
+    policyFile.removePermissionsFromRole(GROUP1_ROLE, ALL_DB2);
+    policyFile.write(context.getPolicyFile());
+
+    // create db1.view1 as select from db2.tbl2
+    statement.execute("DROP VIEW IF EXISTS " + DB1 + "." + VIEW2);
+    context.assertAuthzException(statement, "CREATE VIEW " + DB1 + "." + VIEW2 +
+        " (value) AS SELECT value from " + DB2 + "." + TBL2 + " LIMIT 10");
+    // create db1.tbl2 as select from db2.tbl2
+    statement.execute("DROP TABLE IF EXISTS " + DB1 + "." + TBL2);
+    context.assertAuthzException(statement, "CREATE TABLE " + DB1 + "." + TBL2 +
+        " AS SELECT value from " + DB2 + "." + TBL2 + " LIMIT 10");
+
+    statement.close();
+    connection.close();
+  }
+
+  /**
+   * Steps: admin creates databases DB_1 and DB_2, with no tables or other
+   * objects in them.
+   * positive test case:
+   * d) user1 has the privilege to create views on tables in DB_1
+   * negative test case:
+   * e) user1 cannot create a view in DB_1 that selects from a table in DB_2
+   * on which it has no select privilege
+   * positive test case:
+   * f) user1 has the privilege to create a view selecting from DB_1.tb_1 and
+   * DB_2.tb_2
+   * negative test case:
+   * g) user1 cannot create a view selecting from DB_1.tb_1 and DB_2.tb_3
+   */
+  @Test
+  public void testCrossDbViewOperations() throws Exception {
+    // edit policy file
+    policyFile
+        .addRolesToGroup("group1", "all_db1", "load_data", "select_tb2")
+        .addPermissionsToRole("all_db1", "server=server1->db=db_1")
+        .addPermissionsToRole("all_db2", "server=server1->db=db_2")
+        .addPermissionsToRole("select_tb2", "server=server1->db=db_2->table=tb_1->action=select")
+        .addPermissionsToRole("load_data", "server=server1->URI=file://" + dataFile.getPath())
+        .addGroupsToUser("user1", "group1");
+    policyFile.write(context.getPolicyFile());
+
+    // admin creates two databases
+    dropDb(ADMIN1, DB1, DB2);
+    createDb(ADMIN1, DB1, DB2);
+    Connection connection = context.createConnection(ADMIN1, "password");
+    Statement statement = context.createStatement(connection);
+    statement.execute("CREATE TABLE " + DB1 + "." + TBL1 + "(id int)");
+    statement.execute("CREATE TABLE " + DB2 + "." + TBL1 + "(id int)");
+    statement.execute("CREATE TABLE " + DB2 + "." + TBL2 + "(id int)");
+    context.close();
+
+    connection = context.createConnection("user1", "foo");
+    statement = context.createStatement(connection);
+
+    // d
+    statement.execute("DROP TABLE IF EXISTS " + DB1 + "." + TBL1);
+    statement.execute("create table " + DB1 + "." + TBL1
+        + " (under_col int comment 'the under column', value string)");
+
+    // e
+    statement.execute("DROP VIEW IF EXISTS " + DB1 + "." + VIEW1);
+    context.assertAuthzException(statement, "CREATE VIEW " + DB1 + "." + VIEW1
+        + " (value) AS SELECT value from " + DB2 + "." + TBL2
+        + " LIMIT 10");
+    // f
+    statement.execute("DROP VIEW IF EXISTS " + DB1 + "." + VIEW2);
+    statement.execute("CREATE VIEW " + DB1 + "." + VIEW2
+        + " (value) AS SELECT value from " + DB1 + "." + TBL1
+        + " LIMIT 10");
+
+    // g
+    statement.execute("DROP VIEW IF EXISTS " + DB1 + "." + VIEW3);
+    context.assertAuthzException(statement, "CREATE VIEW " + DB1 + "." + VIEW3
+        + " (value) AS SELECT value from " + DB2 + "." + TBL2
+        + " LIMIT 10");
+  }
+}
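
The PolicyFile builder used throughout this test writes the same ini-style
policy file that PolicyFileEditor manipulates by hand: addGroupsToUser feeds
the [users] section, addRolesToGroup the [groups] section, and
addPermissionsToRole the [roles] section. A minimal sketch of that mapping,
assuming those output conventions (group, role, and user names are
illustrative):

    PolicyFile policyFile = PolicyFile.createAdminOnServer1(ADMIN1);
    policyFile
        .addRolesToGroup("group1", "select_tab1")
        .addPermissionsToRole("select_tab1",
            "server=server1->db=db1->table=tab1->action=select")
        .addGroupsToUser("user1", "group1");
    policyFile.write(context.getPolicyFile());
    // Expected file shape (plus the admin entries createAdminOnServer1 adds):
    //   [groups]
    //   group1 = select_tab1
    //   [roles]
    //   select_tab1 = server=server1->db=db1->table=tab1->action=select
    //   [users]
    //   user1 = group1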