Posted to commits@sentry.apache.org by sp...@apache.org on 2018/06/27 16:47:44 UTC

[17/17] sentry git commit: SENTRY-2282: Remove hive-authzv2 binding and tests modules completely (Sergio Pena, reviewed by Na Li)

SENTRY-2282: Remove hive-authzv2 binding and tests modules completely (Sergio Pena, reviewed by Na Li)


Project: http://git-wip-us.apache.org/repos/asf/sentry/repo
Commit: http://git-wip-us.apache.org/repos/asf/sentry/commit/e358fde7
Tree: http://git-wip-us.apache.org/repos/asf/sentry/tree/e358fde7
Diff: http://git-wip-us.apache.org/repos/asf/sentry/diff/e358fde7

Branch: refs/heads/master
Commit: e358fde7f7a22f6848f464d55644d6eec89b5f88
Parents: 0397fc5
Author: Sergio Pena <se...@cloudera.com>
Authored: Wed Jun 27 11:44:47 2018 -0500
Committer: Sergio Pena <se...@cloudera.com>
Committed: Wed Jun 27 11:44:47 2018 -0500

----------------------------------------------------------------------
 sentry-binding/pom.xml                          |   22 +-
 sentry-binding/sentry-binding-hive-v2/pom.xml   |  163 --
 .../hive/ql/exec/SentryFilterDDLTask.java       |  162 --
 .../binding/hive/authz/SentryConfigTool.java    |  643 -----
 .../hive/v2/HiveAuthzBindingHookBaseV2.java     |  880 -------
 .../binding/hive/v2/HiveAuthzBindingHookV2.java |  157 --
 .../hive/v2/HiveAuthzBindingSessionHookV2.java  |  129 -
 .../hive/v2/HiveAuthzPrivilegesMapV2.java       |  327 ---
 .../hive/v2/SentryAuthorizerFactory.java        |  164 --
 ...entryHiveAuthorizationTaskFactoryImplV2.java |   64 -
 .../hive/v2/SentryHivePrivilegeObject.java      |   32 -
 .../DefaultSentryAccessController.java          |  564 -----
 .../v2/authorizer/DefaultSentryValidator.java   |  477 ----
 .../authorizer/SentryHiveAccessController.java  |  200 --
 .../SentryHiveAuthorizationValidator.java       |   58 -
 .../v2/authorizer/SentryHiveAuthorizer.java     |  192 --
 .../metastore/AuthorizingObjectStoreBaseV2.java |  412 ----
 .../v2/metastore/AuthorizingObjectStoreV2.java  |  412 ----
 .../metastore/MetastoreAuthzBindingBaseV2.java  |  459 ----
 .../v2/metastore/MetastoreAuthzBindingV2.java   |   99 -
 .../metastore/SentryHiveMetaStoreClientV2.java  |  161 --
 .../v2/metastore/SentryMetaStoreFilterHook.java |  201 --
 .../SentryMetastorePostEventListenerBaseV2.java |  416 ----
 .../SentryMetastorePostEventListenerV2.java     |   73 -
 .../hive/v2/util/SentryAuthorizerUtil.java      |  362 ---
 .../hive/v2/util/SimpleSemanticAnalyzer.java    |  372 ---
 .../v2/DummyHiveAuthenticationProvider.java     |   63 -
 sentry-tests/sentry-tests-hive-v2/pom.xml       |  530 -----
 .../dbprovider/AbstractTestWithDbProvider.java  |  166 --
 .../e2e/dbprovider/TestColumnEndToEnd.java      |  417 ----
 .../e2e/dbprovider/TestConcurrentClients.java   |  343 ---
 .../e2e/dbprovider/TestDatabaseProvider.java    | 2215 ------------------
 .../TestDbColumnLevelMetaDataOps.java           |  374 ---
 .../tests/e2e/dbprovider/TestDbComplexView.java |  314 ---
 .../tests/e2e/dbprovider/TestDbConnections.java |  150 --
 .../tests/e2e/dbprovider/TestDbCrossDbOps.java  |   40 -
 .../tests/e2e/dbprovider/TestDbDDLAuditLog.java |  293 ---
 .../tests/e2e/dbprovider/TestDbEndToEnd.java    |  251 --
 .../TestDbExportImportPrivileges.java           |   44 -
 .../e2e/dbprovider/TestDbJDBCInterface.java     |   45 -
 .../TestDbMetadataObjectRetrieval.java          |   44 -
 .../dbprovider/TestDbMetadataPermissions.java   |   39 -
 .../dbprovider/TestDbMovingToProduction.java    |   38 -
 .../tests/e2e/dbprovider/TestDbOperations.java  |   37 -
 .../dbprovider/TestDbPrivilegeAtTransform.java  |   38 -
 .../TestDbPrivilegeCleanupOnDrop.java           |  354 ---
 .../TestDbPrivilegesAtColumnScope.java          |   38 -
 .../TestDbPrivilegesAtDatabaseScope.java        |   46 -
 .../TestDbPrivilegesAtFunctionScope.java        |   39 -
 .../TestDbPrivilegesAtTableScope.java           |   39 -
 .../TestDbRuntimeMetadataRetrieval.java         |   46 -
 .../tests/e2e/dbprovider/TestDbSandboxOps.java  |   49 -
 .../TestDbSentryOnFailureHookLoading.java       |  271 ---
 .../e2e/dbprovider/TestDbUriPermissions.java    |   45 -
 .../e2e/dbprovider/TestGrantUserToRole.java     |  261 ---
 .../TestPrivilegeWithGrantOption.java           |  379 ---
 .../TestPrivilegeWithHAGrantOption.java         |  160 --
 .../tests/e2e/hdfs/TestHDFSIntegration.java     | 1932 ---------------
 .../e2e/hdfs/TestHDFSIntegrationWithHA.java     |   28 -
 .../e2e/hive/AbstractTestWithHiveServer.java    |   94 -
 .../AbstractTestWithStaticConfiguration.java    |  723 ------
 .../apache/sentry/tests/e2e/hive/Context.java   |  321 ---
 .../e2e/hive/DummySentryOnFailureHook.java      |   44 -
 .../sentry/tests/e2e/hive/PolicyFileEditor.java |   78 -
 .../tests/e2e/hive/PrivilegeResultSet.java      |  124 -
 .../sentry/tests/e2e/hive/StaticUserGroup.java  |   55 -
 .../sentry/tests/e2e/hive/TestConfigTool.java   |  346 ---
 .../sentry/tests/e2e/hive/TestCrossDbOps.java   |  669 ------
 .../e2e/hive/TestCustomSerdePrivileges.java     |  120 -
 .../sentry/tests/e2e/hive/TestEndToEnd.java     |  128 -
 .../e2e/hive/TestExportImportPrivileges.java    |  162 --
 .../tests/e2e/hive/TestJDBCInterface.java       |  228 --
 .../tests/e2e/hive/TestLockPrivileges.java      |  214 --
 .../e2e/hive/TestMetadataObjectRetrieval.java   |  501 ----
 .../tests/e2e/hive/TestMetadataPermissions.java |  128 -
 .../tests/e2e/hive/TestMovingToProduction.java  |  220 --
 .../sentry/tests/e2e/hive/TestOperations.java   | 1289 ----------
 .../tests/e2e/hive/TestPerDBConfiguration.java  |  408 ----
 .../e2e/hive/TestPerDatabasePolicyFile.java     |  118 -
 .../tests/e2e/hive/TestPolicyImportExport.java  |  196 --
 .../e2e/hive/TestPrivilegeAtTransform.java      |  118 -
 .../e2e/hive/TestPrivilegesAtColumnScope.java   |  518 ----
 .../e2e/hive/TestPrivilegesAtDatabaseScope.java |  399 ----
 .../e2e/hive/TestPrivilegesAtFunctionScope.java |  262 ---
 .../e2e/hive/TestPrivilegesAtTableScope.java    |  662 ------
 .../tests/e2e/hive/TestReloadPrivileges.java    |   54 -
 .../e2e/hive/TestRuntimeMetadataRetrieval.java  |  429 ----
 .../sentry/tests/e2e/hive/TestSandboxOps.java   |  529 -----
 .../hive/TestSentryOnFailureHookLoading.java    |  134 --
 .../tests/e2e/hive/TestServerConfiguration.java |  265 ---
 .../tests/e2e/hive/TestUriPermissions.java      |  262 ---
 .../tests/e2e/hive/TestUserManagement.java      |  383 ---
 .../tests/e2e/hive/TestViewPrivileges.java      |  138 --
 .../sentry/tests/e2e/hive/fs/AbstractDFS.java   |   87 -
 .../sentry/tests/e2e/hive/fs/ClusterDFS.java    |   69 -
 .../apache/sentry/tests/e2e/hive/fs/DFS.java    |   32 -
 .../sentry/tests/e2e/hive/fs/DFSFactory.java    |   49 -
 .../sentry/tests/e2e/hive/fs/MiniDFS.java       |   94 -
 .../e2e/hive/hiveserver/AbstractHiveServer.java |   95 -
 .../e2e/hive/hiveserver/EmbeddedHiveServer.java |   60 -
 .../e2e/hive/hiveserver/ExternalHiveServer.java |  124 -
 .../tests/e2e/hive/hiveserver/HiveServer.java   |   34 -
 .../e2e/hive/hiveserver/HiveServerFactory.java  |  296 ---
 .../e2e/hive/hiveserver/InternalHiveServer.java |   47 -
 .../hiveserver/InternalMetastoreServer.java     |   80 -
 .../hive/hiveserver/UnmanagedHiveServer.java    |  113 -
 ...actMetastoreTestWithStaticConfiguration.java |  218 --
 .../metastore/SentryPolicyProviderForDb.java    |  165 --
 .../metastore/TestAuthorizingObjectStore.java   | 1106 ---------
 .../e2e/metastore/TestMetaStoreWithPigHCat.java |  113 -
 .../e2e/metastore/TestMetastoreEndToEnd.java    |  628 -----
 .../tests/e2e/minisentry/InternalSentrySrv.java |  235 --
 .../sentry/tests/e2e/minisentry/SentrySrv.java  |   86 -
 .../tests/e2e/minisentry/SentrySrvFactory.java  |   45 -
 .../resources/core-site-for-sentry-test.xml     |   34 -
 .../src/test/resources/emp.dat                  |   12 -
 .../src/test/resources/hadoop                   |  107 -
 .../src/test/resources/kv1.dat                  |  500 ----
 .../src/test/resources/log4j.properties         |   35 -
 .../src/test/resources/log4j2.properties        |   53 -
 .../src/test/resources/sentry-provider.ini      |   25 -
 .../src/test/resources/sentry-site.xml          |   33 -
 .../src/test/resources/testPolicyImport.ini     |   25 -
 .../test/resources/testPolicyImportAdmin.ini    |   22 -
 .../test/resources/testPolicyImportError.ini    |   21 -
 .../scale-test/create-many-dbs-tables.sh        |  277 ---
 126 files changed, 1 insertion(+), 31567 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/sentry/blob/e358fde7/sentry-binding/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-binding/pom.xml b/sentry-binding/pom.xml
index 17b0f1a..fd5e28b 100644
--- a/sentry-binding/pom.xml
+++ b/sentry-binding/pom.xml
@@ -37,26 +37,6 @@ limitations under the License.
     <module>sentry-binding-hbase-indexer</module>
     <module>sentry-binding-hive-conf</module>
     <module>sentry-binding-hive-follower</module>
+    <module>sentry-binding-hive</module>
   </modules>
-
-  <profiles>
-    <profile>
-      <id>hive-authz1</id>
-      <activation>
-        <activeByDefault>true</activeByDefault>
-      </activation>
-      <modules>
-        <module>sentry-binding-hive</module>
-      </modules>
-    </profile>
-    <profile>
-      <id>hive-authz2</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <modules>
-        <module>sentry-binding-hive-v2</module>
-      </modules>
-    </profile>
-  </profiles>
 </project>
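
Note on the pom.xml change above: module selection previously went through Maven profiles. The `hive-authz1` profile (active by default) built sentry-binding-hive, while the v2 binding had to be requested explicitly, e.g. with a hypothetical `mvn install -P hive-authz2` (standard Maven profile activation; the profile ids are the ones in the removed block). After this commit the profiles are gone and sentry-binding-hive is always part of the build.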

http://git-wip-us.apache.org/repos/asf/sentry/blob/e358fde7/sentry-binding/sentry-binding-hive-v2/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/pom.xml b/sentry-binding/sentry-binding-hive-v2/pom.xml
deleted file mode 100644
index f77e64e..0000000
--- a/sentry-binding/sentry-binding-hive-v2/pom.xml
+++ /dev/null
@@ -1,163 +0,0 @@
-<?xml version="1.0"?>
-<!--
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements.  See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to You under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <groupId>org.apache.sentry</groupId>
-    <artifactId>sentry-binding</artifactId>
-    <version>2.1.0-SNAPSHOT</version>
-  </parent>
-
-  <artifactId>sentry-binding-hive-v2</artifactId>
-  <name>Sentry Binding v2 for Hive</name>
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-binding-hive-common</artifactId>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpclient</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpcore</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.thrift</groupId>
-      <artifactId>libthrift</artifactId>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpclient</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpcore</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.derby</groupId>
-      <artifactId>derby</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-exec</artifactId>
-      <version>${hive.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-service</artifactId>
-      <version>${hive.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-metastore</artifactId>
-      <version>${hive.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-shims</artifactId>
-      <version>${hive.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-serde</artifactId>
-      <version>${hive.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-common</artifactId>
-      <version>${hive.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-core-common</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-core-model-db</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-provider-common</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-provider-file</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-policy-engine</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-common</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-client</artifactId>
-      <version>${hadoop.version}</version>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.datanucleus</groupId>
-      <artifactId>datanucleus-core</artifactId>
-      <version>${datanucleus-core.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.datanucleus</groupId>
-      <artifactId>datanucleus-api-jdo</artifactId>
-      <version>${datanucleus-api-jdo.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.datanucleus</groupId>
-      <artifactId>datanucleus-rdbms</artifactId>
-      <version>${datanucleus-rdbms.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.datanucleus</groupId>
-      <artifactId>javax.jdo</artifactId>
-      <version>${datanucleus-jdo.version}</version>
-    </dependency>
-  </dependencies>
-
-</project>

http://git-wip-us.apache.org/repos/asf/sentry/blob/e358fde7/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java
deleted file mode 100644
index 0b1acf1..0000000
--- a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.ql.exec;
-
-import static org.apache.hadoop.util.StringUtils.stringifyException;
-
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.DriverContext;
-import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.sentry.binding.hive.HiveAuthzBindingHookBaseV2;
-import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
-import org.apache.sentry.core.common.Subject;
-
-import com.google.common.base.Preconditions;
-
-public class SentryFilterDDLTask extends DDLTask {
-  private static final long serialVersionUID = 1L;
-  private static final Log LOG = LogFactory.getLog(SentryFilterDDLTask.class);
-
-  private HiveAuthzBinding hiveAuthzBinding;
-  private Subject subject;
-  private HiveOperation stmtOperation;
-
-  public SentryFilterDDLTask(HiveAuthzBinding hiveAuthzBinding, Subject subject,
-      HiveOperation stmtOperation) {
-    Preconditions.checkNotNull(hiveAuthzBinding);
-    Preconditions.checkNotNull(subject);
-    Preconditions.checkNotNull(stmtOperation);
-
-    this.hiveAuthzBinding = hiveAuthzBinding;
-    this.subject = subject;
-    this.stmtOperation = stmtOperation;
-  }
-
-  public HiveAuthzBinding getHiveAuthzBinding() {
-    return hiveAuthzBinding;
-  }
-
-  public Subject getSubject() {
-    return subject;
-  }
-
-  public HiveOperation getStmtOperation() {
-    return stmtOperation;
-  }
-
-  @Override
-  public int execute(DriverContext driverContext) {
-    // Currently SentryFilterDDLTask only supports filtering the "show columns in table" command.
-    ShowColumnsDesc showCols = work.getShowColumnsDesc();
-    try {
-      if (showCols != null) {
-        return showFilterColumns(showCols);
-      }
-    } catch (Throwable e) {
-      failed(e);
-      return 1;
-    }
-
-    return super.execute(driverContext);
-  }
-
-  private void failed(Throwable e) {
-    // Get the cause of the exception if available
-    Throwable error = e;
-    while (error.getCause() != null && error.getClass() == RuntimeException.class) {
-      error = error.getCause();
-    }
-    setException(error);
-    LOG.error(stringifyException(error));
-  }
-
-  /**
-   * Filters the output of the "show columns in table" command down to the
-   * columns the subject has privileges on.
-   *
-   */
-  private int showFilterColumns(ShowColumnsDesc showCols) throws HiveException {
-    Table table = Hive.get(conf).getTable(showCols.getTableName());
-
-    // write the results in the file
-    DataOutputStream outStream = null;
-    try {
-      Path resFile = new Path(showCols.getResFile());
-      FileSystem fs = resFile.getFileSystem(conf);
-      outStream = fs.create(resFile);
-
-      List<FieldSchema> cols = table.getCols();
-      cols.addAll(table.getPartCols());
-      // In case the query is served by HiveServer2, don't pad it with spaces,
-      // as HiveServer2 output is consumed by JDBC/ODBC clients.
-      boolean isOutputPadded = !SessionState.get().isHiveServerQuery();
-      outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(
-          filterColumns(cols, table), false, isOutputPadded, null));
-      outStream.close();
-      outStream = null;
-    } catch (IOException e) {
-      throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
-    } finally {
-      IOUtils.closeStream(outStream);
-    }
-    return 0;
-  }
-
-  private List<FieldSchema> filterColumns(List<FieldSchema> cols, Table table) throws HiveException {
-    // keep only the columns that the subject has privileges on
-    return HiveAuthzBindingHookBaseV2.filterShowColumns(getHiveAuthzBinding(),
-        cols, getStmtOperation(), getSubject().getName(), table.getTableName(), table.getDbName());
-  }
-
-  public void copyDDLTask(DDLTask ddlTask) {
-    work = ddlTask.getWork();
-    rootTask = ddlTask.isRootTask();
-    childTasks = ddlTask.getChildTasks();
-    parentTasks = ddlTask.getParentTasks();
-    backupTask = ddlTask.getBackupTask();
-    backupChildrenTasks = ddlTask.getBackupChildrenTasks();
-    started = ddlTask.started();
-    isdone = ddlTask.done();
-    queued = ddlTask.getQueued();
-    id = ddlTask.getId();
-    taskCounters = ddlTask.getCounters();
-    feedSubscribers = ddlTask.getFeedSubscribers();
-    taskTag = ddlTask.getTaskTag();
-    setLocalMode(ddlTask.isLocalMode());
-    setRetryCmdWhenFail(ddlTask.ifRetryCmdWhenFail());
-    queryPlan = ddlTask.getQueryPlan();
-    jobID = ddlTask.getJobID();
-    setException(ddlTask.getException());
-    console = ddlTask.console;
-    setFetchSource(ddlTask.isFetchSource());
-  }
-}
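
For readers skimming the deleted class above: SentryFilterDDLTask wrapped
Hive's DDLTask so that "show columns" output was trimmed to the columns the
user may access. A minimal sketch of how a hook could have substituted it,
using only the constructor and copyDDLTask() shown above (the in-scope
variables are assumed here for illustration, not a live API):

    // hypothetical wiring inside a semantic-analyzer hook; hiveAuthzBinding,
    // subject, stmtOperation and originalDdlTask are assumed to be in scope
    SentryFilterDDLTask filterTask =
        new SentryFilterDDLTask(hiveAuthzBinding, subject, stmtOperation);
    filterTask.copyDDLTask(originalDdlTask); // inherit the planned DDL work
    // the filter task then executes in place of the original DDLTask and
    // writes only the authorized columns to the result file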

http://git-wip-us.apache.org/repos/asf/sentry/blob/e358fde7/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java
deleted file mode 100644
index f6b4518..0000000
--- a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java
+++ /dev/null
@@ -1,643 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.binding.hive.authz;
-
-import java.security.CodeSource;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.GnuParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionGroup;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.Parser;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.Driver;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.sentry.Command;
-import org.apache.sentry.binding.hive.HiveAuthzBindingHookBaseV2;
-import org.apache.sentry.binding.hive.SentryPolicyFileFormatFactory;
-import org.apache.sentry.binding.hive.SentryPolicyFileFormatter;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
-import org.apache.sentry.core.common.exception.SentryConfigurationException;
-import org.apache.sentry.core.common.Subject;
-import org.apache.sentry.core.model.db.Server;
-import org.apache.sentry.provider.common.AuthorizationProvider;
-import org.apache.sentry.api.service.thrift.SentryPolicyServiceClient;
-import org.apache.sentry.service.thrift.SentryServiceClientFactory;
-
-/**
- * Command-line tool that loads the Hive and Sentry configuration, validates
- * policy files, verifies queries against them, lists a user's privileges,
- * and imports/exports the Sentry policy data.
- */
-public class SentryConfigTool {
-  private String sentrySiteFile = null;
-  private String policyFile = null;
-  private String query = null;
-  private String jdbcURL = null;
-  private String user = null;
-  private String passWord = null;
-  private String importPolicyFilePath = null;
-  private String exportPolicyFilePath = null;
-  private String objectPath = null;
-  private boolean listPrivs = false;
-  private boolean validate = false;
-  private boolean importOverwriteRole = false;
-  private HiveConf hiveConf = null;
-  private HiveAuthzConf authzConf = null;
-  private AuthorizationProvider sentryProvider = null;
-
-  public SentryConfigTool() {
-
-  }
-
-  public AuthorizationProvider getSentryProvider() {
-    return sentryProvider;
-  }
-
-  public void setSentryProvider(AuthorizationProvider sentryProvider) {
-    this.sentryProvider = sentryProvider;
-  }
-
-  public HiveConf getHiveConf() {
-    return hiveConf;
-  }
-
-  public void setHiveConf(HiveConf hiveConf) {
-    this.hiveConf = hiveConf;
-  }
-
-  public HiveAuthzConf getAuthzConf() {
-    return authzConf;
-  }
-
-  public void setAuthzConf(HiveAuthzConf authzConf) {
-    this.authzConf = authzConf;
-  }
-
-  public boolean isValidate() {
-    return validate;
-  }
-
-  public void setValidate(boolean validate) {
-    this.validate = validate;
-  }
-
-  public String getImportPolicyFilePath() {
-    return importPolicyFilePath;
-  }
-
-  public void setImportPolicyFilePath(String importPolicyFilePath) {
-    this.importPolicyFilePath = importPolicyFilePath;
-  }
-
-  public String getObjectPath() {
-    return objectPath;
-  }
-
-  public void setObjectPath(String objectPath) {
-    this.objectPath = objectPath;
-  }
-
-  public String getExportPolicyFilePath() {
-    return exportPolicyFilePath;
-  }
-
-  public void setExportPolicyFilePath(String exportPolicyFilePath) {
-    this.exportPolicyFilePath = exportPolicyFilePath;
-  }
-
-  public String getSentrySiteFile() {
-    return sentrySiteFile;
-  }
-
-  public void setSentrySiteFile(String sentrySiteFile) {
-    this.sentrySiteFile = sentrySiteFile;
-  }
-
-  public String getPolicyFile() {
-    return policyFile;
-  }
-
-  public void setPolicyFile(String policyFile) {
-    this.policyFile = policyFile;
-  }
-
-  public String getQuery() {
-    return query;
-  }
-
-  public void setQuery(String query) {
-    this.query = query;
-  }
-
-  public String getJdbcURL() {
-    return jdbcURL;
-  }
-
-  public void setJdbcURL(String jdbcURL) {
-    this.jdbcURL = jdbcURL;
-  }
-
-  public String getUser() {
-    return user;
-  }
-
-  public void setUser(String user) {
-    this.user = user;
-  }
-
-  public String getPassWord() {
-    return passWord;
-  }
-
-  public void setPassWord(String passWord) {
-    this.passWord = passWord;
-  }
-
-  public boolean isListPrivs() {
-    return listPrivs;
-  }
-
-  public void setListPrivs(boolean listPrivs) {
-    this.listPrivs = listPrivs;
-  }
-
-  public boolean isImportOverwriteRole() {
-    return importOverwriteRole;
-  }
-
-  public void setImportOverwriteRole(boolean importOverwriteRole) {
-    this.importOverwriteRole = importOverwriteRole;
-  }
-
-  /**
-   * Set the required system properties to be read by HiveConf and AuthzConf.
-   * @throws Exception
-   */
-  public void setupConfig() throws Exception {
-    System.out.println("Configuration: ");
-    CodeSource src = SentryConfigTool.class.getProtectionDomain()
-        .getCodeSource();
-    if (src != null) {
-      System.out.println("Sentry package jar: " + src.getLocation());
-    }
-
-    if (getPolicyFile() != null) {
-      System.setProperty(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar(),
-          getPolicyFile());
-    }
-    System.setProperty(AuthzConfVars.SENTRY_TESTING_MODE.getVar(), "true");
-    setHiveConf(new HiveConf(SessionState.class));
-    getHiveConf().setVar(ConfVars.SEMANTIC_ANALYZER_HOOK,
-        HiveAuthzBindingHookBaseV2.class.getName());
-    try {
-      System.out.println("Hive config: " + HiveConf.getHiveSiteLocation());
-    } catch (NullPointerException e) {
-      // Hack: HiveConf doesn't provide a reliable way to check whether it
-      // found a valid hive-site.xml
-      throw new SentryConfigurationException("Didn't find a hive-site.xml");
-
-    }
-
-    if (getSentrySiteFile() != null) {
-      getHiveConf()
-          .set(HiveAuthzConf.HIVE_SENTRY_CONF_URL, getSentrySiteFile());
-    }
-
-    setAuthzConf(HiveAuthzConf.getAuthzConf(getHiveConf()));
-    System.out.println("Sentry config: "
-        + getAuthzConf().getHiveAuthzSiteFile());
-    System.out.println("Sentry Policy: "
-        + getAuthzConf().get(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar()));
-    System.out.println("Sentry server: "
-        + getAuthzConf().get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar()));
-
-    setSentryProvider(getAuthorizationProvider());
-  }
-
-  // load auth provider
-  private AuthorizationProvider getAuthorizationProvider()
-      throws IllegalStateException, SentryConfigurationException {
-    String serverName = new Server(getAuthzConf().get(
-        AuthzConfVars.AUTHZ_SERVER_NAME.getVar())).getName();
-    // get the configured sentry provider
-    try {
-      return HiveAuthzBinding.getAuthProvider(getHiveConf(),
-          authzConf, serverName);
-    } catch (SentryConfigurationException eC) {
-      printConfigErrors(eC);
-      throw eC;
-    } catch (Exception e) {
-      throw new IllegalStateException("Couldn't load sentry provider ", e);
-    }
-  }
-
-  // validate policy files
-  public void validatePolicy() throws Exception {
-    try {
-      getSentryProvider().validateResource(true);
-    } catch (SentryConfigurationException e) {
-      printConfigErrors(e);
-      throw e;
-    }
-    System.out.println("No errors found in the policy file");
-  }
-
-  // import the sentry mapping data to database
-  public void importPolicy() throws Exception {
-    String requestorUserName = System.getProperty("user.name", "");
-    // get the FileFormatter according to the configuration
-    SentryPolicyFileFormatter sentryPolicyFileFormatter = SentryPolicyFileFormatFactory
-        .createFileFormatter(authzConf);
-    // parse the input file, get the mapping data in map structure
-    Map<String, Map<String, Set<String>>> policyFileMappingData = sentryPolicyFileFormatter.parse(
-        importPolicyFilePath, authzConf);
-    // TODO: add a validator here to check the data's values, format, and hierarchy
-    try(SentryPolicyServiceClient client =
-                SentryServiceClientFactory.create(getAuthzConf())) {
-      // import the mapping data to database
-      client.importPolicy(policyFileMappingData, requestorUserName, importOverwriteRole);
-    }
-  }
-
-  // export the sentry mapping data to file
-  public void exportPolicy() throws Exception {
-    String requestorUserName = System.getProperty("user.name", "");
-    try (SentryPolicyServiceClient client =
-                SentryServiceClientFactory.create(getAuthzConf())) {
-      // export the sentry mapping data from database to map structure
-      Map<String, Map<String, Set<String>>> policyFileMappingData = client
-              .exportPolicy(requestorUserName, objectPath);
-      // get the FileFormatter according to the configuration
-      SentryPolicyFileFormatter sentryPolicyFileFormatter = SentryPolicyFileFormatFactory
-              .createFileFormatter(authzConf);
-      // write the sentry mapping data to exportPolicyFilePath with the data in map structure
-      sentryPolicyFileFormatter.write(exportPolicyFilePath, policyFileMappingData);
-    }
-  }
-
-  // list permissions for given user
-  public void listPrivs() throws Exception {
-    getSentryProvider().validateResource(true);
-    System.out.println("Available privileges for user " + getUser() + ":");
-    Set<String> permList = getSentryProvider().listPrivilegesForSubject(
-        new Subject(getUser()));
-    for (String perms : permList) {
-      System.out.println("\t" + perms);
-    }
-    if (permList.isEmpty()) {
-      System.out.println("\t*** No permissions available ***");
-    }
-  }
-
-  // Verify the given query
-  public void verifyLocalQuery(String queryStr) throws Exception {
-    // setup Hive driver
-    SessionState session = new SessionState(getHiveConf());
-    SessionState.start(session);
-    Driver driver = new Driver(session.getConf(), getUser());
-
-    // compile the query
-    CommandProcessorResponse compilerStatus = driver
-        .compileAndRespond(queryStr);
-    if (compilerStatus.getResponseCode() != 0) {
-      String errMsg = compilerStatus.getErrorMessage();
-      if (errMsg.contains(HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE)) {
-        printMissingPerms(getHiveConf().get(
-            HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS));
-      }
-      throw new SemanticException("Compilation error: "
-          + compilerStatus.getErrorMessage());
-    }
-    driver.close();
-    System.out
-        .println("User " + getUser() + " has privileges to run the query");
-  }
-
-  // connect to remote HS2 and run mock query
-  public void verifyRemoteQuery(String queryStr) throws Exception {
-    Class.forName("org.apache.hive.jdbc.HiveDriver");
-    Connection conn = DriverManager.getConnection(getJdbcURL(), getUser(),
-        getPassWord());
-    Statement stmt = conn.createStatement();
-    if (!isSentryEnabledOnHiveServer(stmt)) {
-      throw new IllegalStateException("Sentry is not enabled on HiveServer2");
-    }
-    stmt.execute("set " + HiveAuthzConf.HIVE_SENTRY_MOCK_COMPILATION + "=true");
-    try {
-      stmt.execute(queryStr);
-    } catch (SQLException e) {
-      String errMsg = e.getMessage();
-      if (errMsg.contains(HiveAuthzConf.HIVE_SENTRY_MOCK_ERROR)) {
-        System.out.println("User "
-            + readConfig(stmt, HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME)
-            + " has privileges to run the query");
-        return;
-      } else if (errMsg
-          .contains(HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE)) {
-        printMissingPerms(readConfig(stmt,
-            HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS));
-        throw e;
-      } else {
-        throw e;
-      }
-    } finally {
-      if (!stmt.isClosed()) {
-        stmt.close();
-      }
-      conn.close();
-    }
-
-  }
-
-  // verify the Sentry session hook is set
-  private boolean isSentryEnabledOnHiveServer(Statement stmt)
-      throws SQLException {
-    String bindingString = readConfig(stmt, HiveConf.ConfVars.HIVE_SERVER2_SESSION_HOOK.varname).toUpperCase();
-    return bindingString.contains("org.apache.sentry.binding.hive".toUpperCase())
-        && bindingString.contains("HiveAuthzBindingSessionHook".toUpperCase());
-  }
-
-  // read a config value using 'set' statement
-  private String readConfig(Statement stmt, String configKey)
-      throws SQLException {
-    try (ResultSet res = stmt.executeQuery("set " + configKey)) {
-      if (!res.next()) {
-        return null;
-      }
-      // parse key=value result format
-      String result = res.getString(1);
-      res.close();
-      return result.substring(result.indexOf("=") + 1);
-    }
-  }
-
-  // print configuration/policy file errors and warnings
-  private void printConfigErrors(SentryConfigurationException configException)
-      throws SentryConfigurationException {
-    System.out.println(" *** Found configuration problems *** ");
-    for (String errMsg : configException.getConfigErrors()) {
-      System.out.println("ERROR: " + errMsg);
-    }
-    for (String warnMsg : configException.getConfigWarnings()) {
-      System.out.println("Warning: " + warnMsg);
-    }
-  }
-
-  // extract the authorization errors from config property and print
-  private void printMissingPerms(String errMsg) {
-    if (errMsg == null || errMsg.isEmpty()) {
-      return;
-    }
-    System.out.println("*** Query compilation failed ***");
-    String perms[] = errMsg.replaceFirst(
-        ".*" + HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE, "")
-        .split(";");
-    System.out.println("Required privileges for given query:");
-    for (int count = 0; count < perms.length; count++) {
-      System.out.println(" \t " + perms[count]);
-    }
-  }
-
-  // print usage
-  private void usage(Options sentryOptions) {
-    HelpFormatter formatter = new HelpFormatter();
-    formatter.printHelp("sentry --command config-tool", sentryOptions);
-    System.exit(-1);
-  }
-
-  /**
-   * parse arguments
-   *
-   * <pre>
-   *   -d,--debug                  Enable debug output
-   *   -e,--query <arg>            Query privilege verification, requires -u
-   *   -h,--help                   Print usage
-   *   -i,--policyIni <arg>        Policy file path
-   *   -j,--jdbcURL <arg>          JDBC URL
-   *   -l,--listPrivs,--listPerms  List privileges for given user, requires -u
-   *   -p,--password <arg>         Password
-   *   -s,--sentry-site <arg>      sentry-site file path
-   *   -u,--user <arg>             user name
-   *   -v,--validate               Validate policy file
-   *   -I,--import                 Import policy file
-   *   -E,--export                 Export policy file
-   *   -o,--overwrite              Overwrite existing role data when importing
-   *   -b,--objectPath             The path of the object whose privileges will be exported
-   * </pre>
-   *
-   * @param args
-   */
-  private void parseArgs(String[] args) {
-    boolean enableDebug = false;
-
-    Options sentryOptions = new Options();
-
-    Option helpOpt = new Option("h", "help", false, "Print usage");
-    helpOpt.setRequired(false);
-
-    Option validateOpt = new Option("v", "validate", false,
-        "Validate policy file");
-    validateOpt.setRequired(false);
-
-    Option queryOpt = new Option("e", "query", true,
-        "Query privilege verification, requires -u");
-    queryOpt.setRequired(false);
-
-    Option listPermsOpt = new Option("l", "listPerms", false,
-        "list permissions for given user, requires -u");
-    listPermsOpt.setRequired(false);
-    Option listPrivsOpt = new Option("listPrivs", false,
-        "list privileges for given user, requires -u");
-    listPrivsOpt.setRequired(false);
-
-    Option importOpt = new Option("I", "import", true,
-        "Import policy file");
-    importOpt.setRequired(false);
-
-    Option exportOpt = new Option("E", "export", true, "Export policy file");
-    exportOpt.setRequired(false);
-    // required args
-    OptionGroup sentryOptGroup = new OptionGroup();
-    sentryOptGroup.addOption(helpOpt);
-    sentryOptGroup.addOption(validateOpt);
-    sentryOptGroup.addOption(queryOpt);
-    sentryOptGroup.addOption(listPermsOpt);
-    sentryOptGroup.addOption(listPrivsOpt);
-    sentryOptGroup.addOption(importOpt);
-    sentryOptGroup.addOption(exportOpt);
-    sentryOptGroup.setRequired(true);
-    sentryOptions.addOptionGroup(sentryOptGroup);
-
-    // optional args
-    Option jdbcArg = new Option("j", "jdbcURL", true, "JDBC URL");
-    jdbcArg.setRequired(false);
-    sentryOptions.addOption(jdbcArg);
-
-    Option sentrySitePath = new Option("s", "sentry-site", true,
-        "sentry-site file path");
-    sentrySitePath.setRequired(false);
-    sentryOptions.addOption(sentrySitePath);
-
-    Option globalPolicyPath = new Option("i", "policyIni", true,
-        "Policy file path");
-    globalPolicyPath.setRequired(false);
-    sentryOptions.addOption(globalPolicyPath);
-
-    Option userOpt = new Option("u", "user", true, "user name");
-    userOpt.setRequired(false);
-    sentryOptions.addOption(userOpt);
-
-    Option passWordOpt = new Option("p", "password", true, "Password");
-    passWordOpt.setRequired(false);
-    sentryOptions.addOption(passWordOpt);
-
-    Option debugOpt = new Option("d", "debug", false, "enable debug output");
-    debugOpt.setRequired(false);
-    sentryOptions.addOption(debugOpt);
-
-    Option overwriteOpt = new Option("o", "overwrite", false, "enable import overwrite");
-    overwriteOpt.setRequired(false);
-    sentryOptions.addOption(overwriteOpt);
-
-    Option objectPathOpt = new Option("b", "objectPath",
-        false, "The path of the object whose privileges will be exported");
-    objectPathOpt.setRequired(false);
-    sentryOptions.addOption(objectPathOpt);
-
-    try {
-      Parser parser = new GnuParser();
-      CommandLine cmd = parser.parse(sentryOptions, args);
-
-      for (Option opt : cmd.getOptions()) {
-        if (opt.getOpt().equals("s")) {
-          setSentrySiteFile(opt.getValue());
-        } else if (opt.getOpt().equals("i")) {
-          setPolicyFile(opt.getValue());
-        } else if (opt.getOpt().equals("e")) {
-          setQuery(opt.getValue());
-        } else if (opt.getOpt().equals("j")) {
-          setJdbcURL(opt.getValue());
-        } else if (opt.getOpt().equals("u")) {
-          setUser(opt.getValue());
-        } else if (opt.getOpt().equals("p")) {
-          setPassWord(opt.getValue());
-        } else if (opt.getOpt().equals("l") || opt.getOpt().equals("listPrivs")) {
-          setListPrivs(true);
-        } else if (opt.getOpt().equals("v")) {
-          setValidate(true);
-        } else if (opt.getOpt().equals("I")) {
-          setImportPolicyFilePath(opt.getValue());
-        } else if (opt.getOpt().equals("E")) {
-          setExportPolicyFilePath(opt.getValue());
-        } else if (opt.getOpt().equals("h")) {
-          usage(sentryOptions);
-        } else if (opt.getOpt().equals("d")) {
-          enableDebug = true;
-        } else if (opt.getOpt().equals("o")) {
-          setImportOverwriteRole(true);
-        } else if (opt.getOpt().equals("b")) {
-          setObjectPath(opt.getValue());
-        }
-      }
-
-      if (isListPrivs() && getUser() == null) {
-        throw new ParseException("Can't use -l without -u ");
-      }
-      if (getQuery() != null && getUser() == null) {
-        throw new ParseException("Must use -u with -e ");
-      }
-    } catch (ParseException e1) {
-      usage(sentryOptions);
-    }
-
-    if (!enableDebug) {
-      // turn off log
-      LogManager.getRootLogger().setLevel(Level.OFF);
-    }
-  }
-
-  public static class CommandImpl implements Command {
-    @Override
-    public void run(String[] args) throws Exception {
-      SentryConfigTool sentryTool = new SentryConfigTool();
-
-      try {
-        // parse arguments
-        sentryTool.parseArgs(args);
-
-        // load configuration
-        sentryTool.setupConfig();
-
-        // validate configuration
-        if (sentryTool.isValidate()) {
-          sentryTool.validatePolicy();
-        }
-
-        if (!StringUtils.isEmpty(sentryTool.getImportPolicyFilePath())) {
-          sentryTool.importPolicy();
-        }
-
-        if (!StringUtils.isEmpty(sentryTool.getExportPolicyFilePath())) {
-          sentryTool.exportPolicy();
-        }
-
-        // list permissions for given user
-        if (sentryTool.isListPrivs()) {
-          sentryTool.listPrivs();
-        }
-
-        // verify given query
-        if (sentryTool.getQuery() != null) {
-          if (sentryTool.getJdbcURL() != null) {
-            sentryTool.verifyRemoteQuery(sentryTool.getQuery());
-          } else {
-            sentryTool.verifyLocalQuery(sentryTool.getQuery());
-          }
-        }
-      } catch (Exception e) {
-        System.out.println("Sentry tool reported Errors: " + e.getMessage());
-        e.printStackTrace(System.out);
-        System.exit(1);
-      }
-    }
-  }
-}
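
For reference, the parseArgs() option table above implies invocations such as
the following (paths, JDBC URL, user name, and query are hypothetical; the
flags are the ones documented in the deleted class):

    sentry --command config-tool -s /etc/sentry/sentry-site.xml -v
    sentry --command config-tool -s /etc/sentry/sentry-site.xml -u alice -l
    sentry --command config-tool -e "select * from t1" \
        -j jdbc:hive2://hs2-host:10000 -u alice -p secret

The first validates the policy file, the second lists a user's privileges
(-l requires -u), and the third verifies a query against a remote
HiveServer2 (with no -j, the query is verified against a local driver).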

http://git-wip-us.apache.org/repos/asf/sentry/blob/e358fde7/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingHookBaseV2.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingHookBaseV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingHookBaseV2.java
deleted file mode 100644
index 5a21dd3..0000000
--- a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingHookBaseV2.java
+++ /dev/null
@@ -1,880 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.binding.hive;
-
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
-
-import java.io.Serializable;
-import java.net.MalformedURLException;
-import java.net.URI;
-import java.net.URL;
-import java.security.CodeSource;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.Set;
-
-import com.google.common.base.Preconditions;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.JavaUtils;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
-import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.hooks.Entity;
-import org.apache.hadoop.hive.ql.hooks.Entity.Type;
-import org.apache.hadoop.hive.ql.hooks.Hook;
-import org.apache.hadoop.hive.ql.hooks.ReadEntity;
-import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
-import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.hadoop.hive.ql.plan.PlanUtils;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
-import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges;
-import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationScope;
-import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationType;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
-import org.apache.sentry.core.common.Subject;
-import org.apache.sentry.core.common.utils.PathUtils;
-import org.apache.sentry.core.model.db.AccessURI;
-import org.apache.sentry.core.model.db.Column;
-import org.apache.sentry.core.model.db.DBModelAction;
-import org.apache.sentry.core.model.db.DBModelAuthorizable;
-import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType;
-import org.apache.sentry.core.model.db.Database;
-import org.apache.sentry.core.model.db.Table;
-import org.apache.sentry.provider.cache.PrivilegeCache;
-import org.apache.sentry.provider.cache.SimplePrivilegeCache;
-import org.apache.sentry.provider.common.AuthorizationProvider;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Splitter;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-
-public abstract class HiveAuthzBindingHookBaseV2 extends AbstractSemanticAnalyzerHook {
-  private static final Logger LOG = LoggerFactory
-      .getLogger(HiveAuthzBindingHookBaseV2.class);
-  protected final HiveAuthzBinding hiveAuthzBinding;
-  protected final HiveAuthzConf authzConf;
-  protected Database currDB = Database.ALL;
-  protected Table currTab;
-  protected List<AccessURI> udfURIs;
-  protected AccessURI serdeURI;
-  protected AccessURI partitionURI;
-  protected Table currOutTab = null;
-  protected Database currOutDB = null;
-  protected final List<String> serdeWhiteList;
-  protected boolean serdeURIPrivilegesEnabled;
-
-  protected final static HiveAuthzPrivileges columnMetaDataPrivilege =
-      new HiveAuthzPrivileges.AuthzPrivilegeBuilder()
-          .addInputObjectPriviledge(AuthorizableType.Column,
-              EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT))
-          .setOperationScope(HiveOperationScope.COLUMN).setOperationType(HiveOperationType.INFO)
-          .build();
-
-  // True if this is a basic DESCRIBE <table> operation. False for other DESCRIBE variants
-  // like DESCRIBE [FORMATTED|EXTENDED]. Required because Hive treats these statements as the
-  // same HiveOperationType, but we want to enforce different privileges on each statement.
-  // Basic DESCRIBE <table> is allowed with only column-level privs, while the variants
-  // require table-level privileges.
-  protected boolean isDescTableBasic = false;
-
-  public HiveAuthzBindingHookBaseV2() throws Exception {
-    SessionState session = SessionState.get();
-    if(session == null) {
-      throw new IllegalStateException("Session has not been started");
-    }
-    // HACK: reset the session authorizer to force the Auth V2 path in Hive
-    SessionState.get().setAuthorizer(null);
-
-    HiveConf hiveConf = session.getConf();
-    if(hiveConf == null) {
-      throw new IllegalStateException("Session HiveConf is null");
-    }
-    authzConf = loadAuthzConf(hiveConf);
-    udfURIs = Lists.newArrayList();
-    hiveAuthzBinding = new HiveAuthzBinding(hiveConf, authzConf);
-    String serdeWhiteLists =
-        authzConf.get(HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST,
-            HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST_DEFAULT);
-    serdeWhiteList = Arrays.asList(serdeWhiteLists.split(","));
-    serdeURIPrivilegesEnabled =
-        authzConf.getBoolean(HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED,
-            HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED_DEFAULT);
-
-    FunctionRegistry.setupPermissionsForBuiltinUDFs("", HiveAuthzConf.HIVE_UDF_BLACK_LIST);
-  }
-
-  public static HiveAuthzConf loadAuthzConf(HiveConf hiveConf) {
-    boolean deprecatedConfigFile = false;
-    HiveAuthzConf newAuthzConf = null;
-    String hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
-    if(hiveAuthzConf == null || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
-      hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_ACCESS_CONF_URL);
-      deprecatedConfigFile = true;
-    }
-
-    if(hiveAuthzConf == null || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
-      throw new IllegalArgumentException("Configuration key " + HiveAuthzConf.HIVE_SENTRY_CONF_URL
-          + " value '" + hiveAuthzConf + "' is invalid.");
-    }
-    try {
-      newAuthzConf = new HiveAuthzConf(new URL(hiveAuthzConf));
-    } catch (MalformedURLException e) {
-      if (deprecatedConfigFile) {
-        throw new IllegalArgumentException("Configuration key " + HiveAuthzConf.HIVE_ACCESS_CONF_URL
-            + " specifies a malformed URL '" + hiveAuthzConf + "'", e);
-      } else {
-        throw new IllegalArgumentException("Configuration key " + HiveAuthzConf.HIVE_SENTRY_CONF_URL
-            + " specifies a malformed URL '" + hiveAuthzConf + "'", e);
-      }
-    }
-    return newAuthzConf;
-  }
-
-  @Override
-  public abstract ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
-      throws SemanticException;
-
-  /**
-   * Post analyze hook that invokes hive auth bindings
-   */
-  @Override
-  public abstract void postAnalyze(HiveSemanticAnalyzerHookContext context,
-      List<Task<? extends Serializable>> rootTasks) throws SemanticException;
-
-  protected void executeOnFailureHooks(HiveSemanticAnalyzerHookContext context,
-      HiveOperation hiveOp, AuthorizationException e) {
-    SentryOnFailureHookContext hookCtx = new SentryOnFailureHookContextImpl(
-        context.getCommand(), context.getInputs(), context.getOutputs(),
-        hiveOp, currDB, currTab, udfURIs, null, context.getUserName(),
-        context.getIpAddress(), e, context.getConf());
-    String csHooks = authzConf.get(
-        HiveAuthzConf.AuthzConfVars.AUTHZ_ONFAILURE_HOOKS.getVar(), "").trim();
-
-    try {
-      for (Hook aofh : getHooks(csHooks)) {
-        ((SentryOnFailureHook)aofh).run(hookCtx);
-      }
-    } catch (Exception ex) {
-      LOG.error("Error executing hook:", ex);
-    }
-  }
-
-  /**
-   * The command 'create function ... using jar <jar resources>' can create a function
-   * with the supplied jar resources in the command, which is translated into ASTNode being
-   * [functionName functionClass resourceList] and resourceList being [resourceType resourcePath].
-   * This function collects all the jar paths for the supplied jar resources.
-   *
-   * @param ast the AST node for the command
-   * @return    the jar path list if any or an empty list
-   */
-  protected List<String> getFunctionJars(ASTNode ast) {
-    ASTNode resourcesNode = (ASTNode) ast.getFirstChildWithType(HiveParser.TOK_RESOURCE_LIST);
-
-    List<String> resources = new ArrayList<String>();
-    if (resourcesNode != null) {
-      for (int idx = 0; idx < resourcesNode.getChildCount(); ++idx) {
-        ASTNode resNode = (ASTNode) resourcesNode.getChild(idx);
-        ASTNode resTypeNode = (ASTNode) resNode.getChild(0);
-        ASTNode resUriNode = (ASTNode) resNode.getChild(1);
-        if (resTypeNode.getType() == HiveParser.TOK_JAR) {
-          resources.add(PlanUtils.stripQuotes(resUriNode.getText()));
-        }
-      }
-    }
-
-    return resources;
-  }
-
-  @VisibleForTesting
-  protected static AccessURI extractPartition(ASTNode ast) throws SemanticException {
-    for (int i = 0; i < ast.getChildCount(); i++) {
-      ASTNode child = (ASTNode)ast.getChild(i);
-      if (child.getToken().getType() == HiveParser.TOK_PARTITIONLOCATION &&
-          child.getChildCount() == 1) {
-        return parseURI(BaseSemanticAnalyzer.
-          unescapeSQLString(child.getChild(0).getText()));
-      }
-    }
-    return null;
-  }
-
-  @VisibleForTesting
-  protected static AccessURI parseURI(String uri) throws SemanticException {
-    return parseURI(uri, false);
-  }
-
-  @VisibleForTesting
-  protected static AccessURI parseURI(String uri, boolean isLocal)
-      throws SemanticException {
-    try {
-      HiveConf conf = SessionState.get().getConf();
-      String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE);
-      Path warehousePath = new Path(warehouseDir);
-
-      // If warehousePath is an absolute path and a scheme is null and authority is null as well,
-      // qualified it with default file system scheme and authority.
-      if (warehousePath.isAbsoluteAndSchemeAuthorityNull()) {
-        URI defaultUri = FileSystem.getDefaultUri(conf);
-        warehousePath = warehousePath.makeQualified(defaultUri, warehousePath);
-        warehouseDir = warehousePath.toUri().toString();
-      }
-      return new AccessURI(PathUtils.parseURI(warehouseDir, uri, isLocal));
-    } catch (Exception e) {
-      throw new SemanticException("Error parsing URI " + uri + ": " +
-        e.getMessage(), e);
-    }
-  }
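
The warehouse-path qualification in parseURI() can be seen in isolation with Hadoop's Path API; a minimal sketch, assuming an illustrative hdfs://nn.example.com:8020 default filesystem:

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class WarehouseQualifySketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        conf.set("fs.defaultFS", "hdfs://nn.example.com:8020"); // illustrative default FS
        Path warehouse = new Path("/user/hive/warehouse");
        // Absolute path with no scheme/authority gets qualified against the default FS.
        if (warehouse.isAbsoluteAndSchemeAuthorityNull()) {
          URI defaultUri = FileSystem.getDefaultUri(conf);
          warehouse = warehouse.makeQualified(defaultUri, warehouse);
        }
        // Prints hdfs://nn.example.com:8020/user/hive/warehouse
        System.out.println(warehouse);
      }
    }
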
-
-  // Find the current database for the session
-  protected Database getCanonicalDb() {
-    return new Database(SessionState.get().getCurrentDatabase());
-  }
-
-  protected void extractDbTableNameFromTOKTABLE(ASTNode astNode) throws SemanticException{
-    String[] fqTableName = BaseSemanticAnalyzer.getQualifiedTableName(astNode);
-    Preconditions.checkArgument(fqTableName.length == 2, "BaseSemanticAnalyzer.getQualifiedTableName should return " +
-            "an array with dbName and tableName");
-    currOutDB = new Database(fqTableName[0]);
-    currOutTab = new Table(fqTableName[1]);
-  }
-
-  /*TODO: Deprecate */
-  protected Database extractDatabase(ASTNode ast) throws SemanticException {
-    String tableName = BaseSemanticAnalyzer.getUnescapedName(ast);
-    if (tableName.contains(".")) {
-      return new Database(tableName.split("\\.")[0]);
-    } else {
-      return getCanonicalDb();
-    }
-  }
-  /*TODO: Deprecate */
-  protected Table extractTable(ASTNode ast) throws SemanticException {
-    String tableName = BaseSemanticAnalyzer.getUnescapedName(ast);
-    if (tableName.contains(".")) {
-      return new Table(tableName.split("\\.")[1]);
-    } else {
-      return new Table(tableName);
-    }
-  }
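
A tiny sketch of the dotted-name handling in extractDatabase()/extractTable() above; the fallback database name "default" is an assumption standing in for the session's current database.

    public class QualifiedNameSketch {
      static String databaseOf(String name, String currentDb) {
        return name.contains(".") ? name.split("\\.")[0] : currentDb;
      }
      static String tableOf(String name) {
        return name.contains(".") ? name.split("\\.")[1] : name;
      }
      public static void main(String[] args) {
        System.out.println(databaseOf("sales.orders", "default")); // sales
        System.out.println(tableOf("sales.orders"));               // orders
        System.out.println(databaseOf("orders", "default"));       // default
      }
    }
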
-
-  public static void runFailureHook(SentryOnFailureHookContext hookContext,
-      String csHooks) {
-    try {
-      for (Hook aofh : getHooks(csHooks)) {
-        ((SentryOnFailureHook) aofh).run(hookContext);
-      }
-    } catch (Exception ex) {
-      LOG.error("Error executing hook:", ex);
-    }
-  }
-  /**
-   * Convert the input/output entities into authorizables. Generate
-   * authorizables for cases like Database and metadata operations where the
-   * compiler doesn't capture entities, then invoke the Hive binding to
-   * validate permissions.
-   *
-   * @param context
-   * @param stmtAuthObject
-   * @param stmtOperation
-   * @throws AuthorizationException
-   */
-  protected void authorizeWithHiveBindings(HiveSemanticAnalyzerHookContext context,
-      HiveAuthzPrivileges stmtAuthObject, HiveOperation stmtOperation) throws AuthorizationException {
-    Set<ReadEntity> inputs = context.getInputs();
-    Set<WriteEntity> outputs = context.getOutputs();
-    List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-    List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-
-    if(LOG.isDebugEnabled()) {
-      LOG.debug("stmtAuthObject.getOperationScope() = " + stmtAuthObject.getOperationScope());
-      LOG.debug("context.getInputs() = " + context.getInputs());
-      LOG.debug("context.getOutputs() = " + context.getOutputs());
-    }
-
-    // Workaround to allow DESCRIBE <table> to be executed with only column-level privileges,
-    // while still authorizing DESCRIBE [EXTENDED|FORMATTED] as table-level.
-    // This is done by treating DESCRIBE <table> the same as SHOW COLUMNS, which only requires
-    // column-level privileges.
-    if (isDescTableBasic) {
-      stmtAuthObject = columnMetaDataPrivilege;
-    }
-
-    switch (stmtAuthObject.getOperationScope()) {
-
-    case SERVER :
-      // validate server-level privileges if applicable, e.g. create UDF, register jar, etc.
-      List<DBModelAuthorizable> serverHierarchy = new ArrayList<DBModelAuthorizable>();
-      serverHierarchy.add(hiveAuthzBinding.getAuthServer());
-      inputHierarchy.add(serverHierarchy);
-      break;
-    case DATABASE:
-      // workaround for database scope statements (create/alter/drop db)
-      List<DBModelAuthorizable> dbHierarchy = new ArrayList<DBModelAuthorizable>();
-      dbHierarchy.add(hiveAuthzBinding.getAuthServer());
-      dbHierarchy.add(currDB);
-      inputHierarchy.add(dbHierarchy);
-
-      if (currOutDB != null) {
-        List<DBModelAuthorizable> outputDbHierarchy = new ArrayList<DBModelAuthorizable>();
-        outputDbHierarchy.add(hiveAuthzBinding.getAuthServer());
-        outputDbHierarchy.add(currOutDB);
-        outputHierarchy.add(outputDbHierarchy);
-      } else {
-        outputHierarchy.add(dbHierarchy);
-      }
-
-      getInputHierarchyFromInputs(inputHierarchy, inputs);
-
-      if (serdeURI != null) {
-        List<DBModelAuthorizable> serdeUriHierarchy = new ArrayList<DBModelAuthorizable>();
-        serdeUriHierarchy.add(hiveAuthzBinding.getAuthServer());
-        serdeUriHierarchy.add(serdeURI);
-        outputHierarchy.add(serdeUriHierarchy);
-      }
-      break;
-    case TABLE:
-      // workaround for add partitions
-      if(partitionURI != null) {
-        inputHierarchy.add(ImmutableList.of(hiveAuthzBinding.getAuthServer(), partitionURI));
-      }
-
-      getInputHierarchyFromInputs(inputHierarchy, inputs);
-      for (WriteEntity writeEntity: outputs) {
-        if (filterWriteEntity(writeEntity)) {
-          continue;
-        }
-        List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
-        entityHierarchy.add(hiveAuthzBinding.getAuthServer());
-        entityHierarchy.addAll(getAuthzHierarchyFromEntity(writeEntity));
-        outputHierarchy.add(entityHierarchy);
-      }
-      // workaround for metadata queries.
-      // Capture the table name in pre-analyze and include that in the input entity list
-      if (currTab != null) {
-        List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
-        externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
-        externalAuthorizableHierarchy.add(currDB);
-        externalAuthorizableHierarchy.add(currTab);
-        inputHierarchy.add(externalAuthorizableHierarchy);
-      }
-
-      // workaround for DDL statements
-      // Capture the table name in pre-analyze and include that in the output entity list
-      if (currOutTab != null) {
-        List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
-        externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
-        externalAuthorizableHierarchy.add(currOutDB);
-        externalAuthorizableHierarchy.add(currOutTab);
-        outputHierarchy.add(externalAuthorizableHierarchy);
-      }
-
-      if (serdeURI != null) {
-        List<DBModelAuthorizable> serdeUriHierarchy = new ArrayList<DBModelAuthorizable>();
-        serdeUriHierarchy.add(hiveAuthzBinding.getAuthServer());
-        serdeUriHierarchy.add(serdeURI);
-        outputHierarchy.add(serdeUriHierarchy);
-      }
-
-      break;
-    case FUNCTION:
-      /* The 'FUNCTION' privilege scope is currently used for
-       *  - CREATE TEMP FUNCTION
-       *  - DROP TEMP FUNCTION
-       */
-      if (!udfURIs.isEmpty()) {
-        List<DBModelAuthorizable> udfUriHierarchy = new ArrayList<DBModelAuthorizable>();
-        udfUriHierarchy.add(hiveAuthzBinding.getAuthServer());
-        udfUriHierarchy.addAll(udfURIs);
-        inputHierarchy.add(udfUriHierarchy);
-        for (WriteEntity writeEntity : outputs) {
-          List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
-          entityHierarchy.add(hiveAuthzBinding.getAuthServer());
-          entityHierarchy.addAll(getAuthzHierarchyFromEntity(writeEntity));
-          outputHierarchy.add(entityHierarchy);
-        }
-      }
-      break;
-    case CONNECT:
-      /* 'CONNECT' is an implicit privilege scope currently used for
-       *  - USE <db>
-       *  It is allowed when the user has any privilege on the current database. For
-       *  application backward compatibility, we allow an (optional) implicit connect
-       *  permission on the 'default' db.
-       */
-      List<DBModelAuthorizable> connectHierarchy = new ArrayList<DBModelAuthorizable>();
-      connectHierarchy.add(hiveAuthzBinding.getAuthServer());
-      // by default allow connect access to default db
-      Table currTbl = Table.ALL;
-      Column currCol = Column.ALL;
-      if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(currDB.getName()) &&
-          "false".equalsIgnoreCase(authzConf.
-              get(HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), "false"))) {
-        currDB = Database.ALL;
-        currTbl = Table.SOME;
-      }
-
-      connectHierarchy.add(currDB);
-      connectHierarchy.add(currTbl);
-      connectHierarchy.add(currCol);
-
-      inputHierarchy.add(connectHierarchy);
-      outputHierarchy.add(connectHierarchy);
-      break;
-    case COLUMN:
-      for (ReadEntity readEntity: inputs) {
-        if (readEntity.getAccessedColumns() != null && !readEntity.getAccessedColumns().isEmpty()) {
-          addColumnHierarchy(inputHierarchy, readEntity);
-        } else {
-          List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
-          entityHierarchy.add(hiveAuthzBinding.getAuthServer());
-          entityHierarchy.addAll(getAuthzHierarchyFromEntity(readEntity));
-          entityHierarchy.add(Column.ALL);
-          inputHierarchy.add(entityHierarchy);
-        }
-      }
-      break;
-    default:
-      throw new AuthorizationException("Unknown operation scope type " +
-          stmtAuthObject.getOperationScope().toString());
-    }
-
-    HiveAuthzBinding binding = null;
-    try {
-      binding = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, context.getUserName());
-    } catch (SemanticException e) {
-      // Will use the original hiveAuthzBinding
-      binding = hiveAuthzBinding;
-    }
-    // validate permission
-    binding.authorize(stmtOperation, stmtAuthObject, getCurrentSubject(context), inputHierarchy,
-        outputHierarchy);
-  }
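
As an illustration of what authorize() receives, a sketch of the input hierarchy built for a simple column-level query such as SELECT a FROM db1.t1; all names ("server1", "db1", "t1", "a") are illustrative.

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.sentry.core.model.db.Column;
    import org.apache.sentry.core.model.db.DBModelAuthorizable;
    import org.apache.sentry.core.model.db.Database;
    import org.apache.sentry.core.model.db.Server;
    import org.apache.sentry.core.model.db.Table;

    public class HierarchySketch {
      public static void main(String[] args) {
        List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<>();
        List<DBModelAuthorizable> columnPath = new ArrayList<>();
        columnPath.add(new Server("server1"));   // the auth server from HiveAuthzBinding
        columnPath.add(new Database("db1"));
        columnPath.add(new Table("t1"));
        columnPath.add(new Column("a"));
        inputHierarchy.add(columnPath);
        // binding.authorize(stmtOperation, stmtAuthObject, subject,
        //                   inputHierarchy, new ArrayList<>());  // would validate SELECT on db1.t1.a
        System.out.println(inputHierarchy);
      }
    }
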
-
-  // Build the hierarchy of authorizable object for the given entity type.
-  private List<DBModelAuthorizable> getAuthzHierarchyFromEntity(Entity entity) {
-    List<DBModelAuthorizable> objectHierarchy = new ArrayList<DBModelAuthorizable>();
-    switch (entity.getType()) {
-    case TABLE:
-      objectHierarchy.add(new Database(entity.getTable().getDbName()));
-      objectHierarchy.add(new Table(entity.getTable().getTableName()));
-      break;
-    case PARTITION:
-    case DUMMYPARTITION:
-      objectHierarchy.add(new Database(entity.getPartition().getTable().getDbName()));
-      objectHierarchy.add(new Table(entity.getPartition().getTable().getTableName()));
-      break;
-    case DFS_DIR:
-    case LOCAL_DIR:
-      try {
-        objectHierarchy.add(parseURI(entity.toString(),
-            entity.getType().equals(Entity.Type.LOCAL_DIR)));
-      } catch (Exception e) {
-        throw new AuthorizationException("Failed to get File URI", e);
-      }
-      break;
-    case DATABASE:
-    case FUNCTION:
-      // TODO use database entities from compiler instead of capturing from AST
-      break;
-    default:
-      throw new UnsupportedOperationException("Unsupported entity type " +
-          entity.getType().name());
-    }
-    return objectHierarchy;
-  }
-
-  /**
-   * Add column-level hierarchies to inputHierarchy
-   *
-   * @param inputHierarchy
-   * @param entity
-   */
-  protected void addColumnHierarchy(List<List<DBModelAuthorizable>> inputHierarchy,
-      ReadEntity entity) {
-    List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
-    entityHierarchy.add(hiveAuthzBinding.getAuthServer());
-    entityHierarchy.addAll(getAuthzHierarchyFromEntity(entity));
-
-    switch (entity.getType()) {
-    case TABLE:
-    case PARTITION:
-      List<String> cols = entity.getAccessedColumns();
-      for (String col : cols) {
-        List<DBModelAuthorizable> colHierarchy = new ArrayList<DBModelAuthorizable>(entityHierarchy);
-        colHierarchy.add(new Column(col));
-        inputHierarchy.add(colHierarchy);
-      }
-      break;
-    default:
-      inputHierarchy.add(entityHierarchy);
-    }
-  }
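
The per-column fan-out can be shown with plain strings; a minimal sketch, with illustrative names, of how one table path becomes one entry per accessed column:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class ColumnFanOutSketch {
      public static void main(String[] args) {
        List<String> tablePath = Arrays.asList("server1", "db1", "t1"); // illustrative names
        List<List<String>> inputHierarchy = new ArrayList<>();
        for (String col : Arrays.asList("a", "b")) {
          List<String> colPath = new ArrayList<>(tablePath); // copy the table path per column
          colPath.add(col);
          inputHierarchy.add(colPath);
        }
        // [[server1, db1, t1, a], [server1, db1, t1, b]]
        System.out.println(inputHierarchy);
      }
    }
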
-
-  /**
-   * Get Authorizables from the inputs and put them into inputHierarchy
-   *
-   * @param inputHierarchy
-   * @param inputs
-   */
-  protected void getInputHierarchyFromInputs(List<List<DBModelAuthorizable>> inputHierarchy,
-      Set<ReadEntity> inputs) {
-    for (ReadEntity readEntity: inputs) {
-      // skip the tables/view that are part of expanded view definition
-      // skip the Hive generated dummy entities created for queries like 'select <expr>'
-      if (isChildTabForView(readEntity) || isDummyEntity(readEntity)) {
-        continue;
-      }
-      if (readEntity.getAccessedColumns() != null && !readEntity.getAccessedColumns().isEmpty()) {
-        addColumnHierarchy(inputHierarchy, readEntity);
-      } else {
-        List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
-        entityHierarchy.add(hiveAuthzBinding.getAuthServer());
-        entityHierarchy.addAll(getAuthzHierarchyFromEntity(readEntity));
-        inputHierarchy.add(entityHierarchy);
-      }
-    }
-  }
-
-  // Check if this write entity needs to be skipped
-  private boolean filterWriteEntity(WriteEntity writeEntity)
-      throws AuthorizationException {
-    // skip URI validation for session scratch file URIs
-    if (writeEntity.isTempURI()) {
-      return true;
-    }
-    try {
-      if (writeEntity.getTyp().equals(Type.DFS_DIR)
-          || writeEntity.getTyp().equals(Type.LOCAL_DIR)) {
-        HiveConf conf = SessionState.get().getConf();
-        String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE);
-        URI scratchURI = new URI(PathUtils.parseDFSURI(warehouseDir,
-          conf.getVar(HiveConf.ConfVars.SCRATCHDIR)));
-        URI requestURI = new URI(PathUtils.parseDFSURI(warehouseDir,
-          writeEntity.getLocation().getPath()));
-        LOG.debug("scratchURI = " + scratchURI + ", requestURI = " + requestURI);
-        if (PathUtils.impliesURI(scratchURI, requestURI)) {
-          return true;
-        }
-        URI localScratchURI = new URI(PathUtils.parseLocalURI(conf.getVar(HiveConf.ConfVars.LOCALSCRATCHDIR)));
-        URI localRequestURI = new URI(PathUtils.parseLocalURI(writeEntity.getLocation().getPath()));
-        LOG.debug("localScratchURI = " + localScratchURI + ", localRequestURI = " + localRequestURI);
-        if (PathUtils.impliesURI(localScratchURI, localRequestURI)) {
-          return true;
-        }
-      }
-    } catch (Exception e) {
-      throw new AuthorizationException("Failed to extract uri details", e);
-    }
-    return false;
-  }
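
A deliberately simplified, hypothetical take on the scratch-directory test above: a write target is skipped when the scratch URI implies it. PathUtils.impliesURI also compares authorities and handles defaults; this sketch shows only the scheme-plus-path-prefix idea, with illustrative URIs.

    import java.net.URI;

    public class ScratchUriSketch {
      // Simplified implication check: same scheme and the scratch path prefixes the request path.
      static boolean implies(URI scratch, URI request) {
        return scratch.getScheme().equals(request.getScheme())
            && request.getPath().startsWith(scratch.getPath());
      }
      public static void main(String[] args) {
        URI scratch = URI.create("hdfs://nn:8020/tmp/hive");       // illustrative scratch dir
        URI request = URI.create("hdfs://nn:8020/tmp/hive/job42"); // illustrative write target
        System.out.println(implies(scratch, request));             // true -> entity is skipped
      }
    }
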
-
-  public static List<String> filterShowTables(
-      HiveAuthzBinding hiveAuthzBinding, List<String> queryResult,
-      HiveOperation operation, String userName, String dbName)
-          throws SemanticException {
-    List<String> filteredResult = new ArrayList<String>();
-    Subject subject = new Subject(userName);
-    HiveAuthzPrivileges tableMetaDataPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Column, EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)).
-        setOperationScope(HiveOperationScope.TABLE).
-        setOperationType(HiveOperationType.INFO).
-        build();
-
-    HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);
-
-    for (String tableName : queryResult) {
-      // if the user has privileges on the table, add it to the filtered list, else discard it
-      Table table = new Table(tableName);
-      Database database = new Database(dbName);
-
-      List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-      List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-      List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
-      externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
-      externalAuthorizableHierarchy.add(database);
-      externalAuthorizableHierarchy.add(table);
-      externalAuthorizableHierarchy.add(Column.ALL);
-      inputHierarchy.add(externalAuthorizableHierarchy);
-
-      try {
-        // do the authorization by new HiveAuthzBinding with PrivilegeCache
-        hiveBindingWithPrivilegeCache.authorize(operation, tableMetaDataPrivilege, subject,
-            inputHierarchy, outputHierarchy);
-        filteredResult.add(table.getName());
-      } catch (AuthorizationException e) {
-        // squash the exception: the user doesn't have privileges on the table, so it
-        // is not added to the filtered list
-      }
-    }
-    return filteredResult;
-  }
-
-  public static List<FieldSchema> filterShowColumns(
-      HiveAuthzBinding hiveAuthzBinding, List<FieldSchema> cols,
-      HiveOperation operation, String userName, String tableName, String dbName)
-          throws SemanticException {
-    List<FieldSchema> filteredResult = new ArrayList<FieldSchema>();
-    Subject subject = new Subject(userName);
-    HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);
-
-    Database database = new Database(dbName);
-    Table table = new Table(tableName);
-    for (FieldSchema col : cols) {
-      // if user has privileges on column, add to filtered list, else discard
-      List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-      List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-      List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
-      externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
-      externalAuthorizableHierarchy.add(database);
-      externalAuthorizableHierarchy.add(table);
-      externalAuthorizableHierarchy.add(new Column(col.getName()));
-      inputHierarchy.add(externalAuthorizableHierarchy);
-
-      try {
-        // do the authorization by new HiveAuthzBinding with PrivilegeCache
-        hiveBindingWithPrivilegeCache.authorize(operation, columnMetaDataPrivilege, subject,
-            inputHierarchy, outputHierarchy);
-        filteredResult.add(col);
-      } catch (AuthorizationException e) {
-        // squash the exception: the user doesn't have privileges on the column, so it
-        // is not added to the filtered list
-      }
-    }
-    return filteredResult;
-  }
-
-  public static List<String> filterShowDatabases(
-      HiveAuthzBinding hiveAuthzBinding, List<String> queryResult,
-      HiveOperation operation, String userName) throws SemanticException {
-    List<String> filteredResult = new ArrayList<String>();
-    Subject subject = new Subject(userName);
-    HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);
-
-    HiveAuthzPrivileges anyPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Column, EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)).
-        addInputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.SELECT)).
-        setOperationScope(HiveOperationScope.CONNECT).
-        setOperationType(HiveOperationType.QUERY).
-        build();
-
-    for (String dbName : queryResult) {
-      // if the user has privileges on the database, add it to the filtered list, else discard it
-
-      // if default is not restricted, continue
-      if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(dbName) && "false".equalsIgnoreCase(
-        hiveAuthzBinding.getAuthzConf().get(
-              HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(),
-              "false"))) {
-        filteredResult.add(DEFAULT_DATABASE_NAME);
-        continue;
-      }
-
-      Database database = new Database(dbName);
-
-      List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-      List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-      List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
-      externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
-      externalAuthorizableHierarchy.add(database);
-      externalAuthorizableHierarchy.add(Table.ALL);
-      externalAuthorizableHierarchy.add(Column.ALL);
-      inputHierarchy.add(externalAuthorizableHierarchy);
-
-      try {
-        // do the authorization by new HiveAuthzBinding with PrivilegeCache
-        hiveBindingWithPrivilegeCache.authorize(operation, anyPrivilege, subject,
-            inputHierarchy, outputHierarchy);
-        filteredResult.add(database.getName());
-      } catch (AuthorizationException e) {
-        // squash the exception: the user doesn't have privileges on the database, so it
-        // is not added to the filtered list
-      }
-    }
-
-    return filteredResult;
-  }
-
-  /**
-   * Check if the given read entity is a table that has parents of type Table.
-   * The Hive compiler performs a query rewrite by replacing a view with its definition.
-   * In the process, it captures both the original view and the tables/views that it
-   * selects from. Access authorization is only interested in the top-level views, not
-   * the underlying tables.
-   * @param readEntity
-   * @return true if the entity is a table pulled in by view expansion
-   */
-  private boolean isChildTabForView(ReadEntity readEntity) {
-    // If this is a table added for view, then we need to skip that
-    if (!readEntity.getType().equals(Type.TABLE) && !readEntity.getType().equals(Type.PARTITION)) {
-      return false;
-    }
-    if (readEntity.getParents() != null && readEntity.getParents().size() > 0) {
-      for (ReadEntity parentEntity : readEntity.getParents()) {
-        if (!parentEntity.getType().equals(Type.TABLE)) {
-          return false;
-        }
-      }
-      return true;
-    } else {
-      return false;
-    }
-  }
-
-  /**
-   * Returns the hooks named in a comma-separated list of hook class names. The hooks
-   * are returned in the order they are listed.
-   *
-   * @param csHooks A comma-separated list of hook class names.
-   * @return        A list of hook instances, in the order they are listed in csHooks
-   * @throws Exception
-   */
-  private static <T extends Hook> List<T> getHooks(String csHooks) throws Exception {
-
-    List<T> hooks = new ArrayList<T>();
-    if (csHooks.isEmpty()) {
-      return hooks;
-    }
-    for (String hookClass : Splitter.on(",").omitEmptyStrings().trimResults().split(csHooks)) {
-      try {
-        @SuppressWarnings("unchecked")
-        T hook =
-            (T) Class.forName(hookClass, true, JavaUtils.getClassLoader()).newInstance();
-        hooks.add(hook);
-      } catch (ClassNotFoundException e) {
-        LOG.error(hookClass + " class not found: " + e.getMessage());
-        throw e;
-      }
-    }
-
-    return hooks;
-  }
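
The loading pattern is ordinary reflective instantiation over a Guava-split list; a minimal standalone sketch, with Runnable standing in for the Hook interface:

    import com.google.common.base.Splitter;
    import java.util.ArrayList;
    import java.util.List;

    public class HookLoaderSketch {
      static List<Runnable> load(String csClasses) throws Exception {
        List<Runnable> hooks = new ArrayList<>();
        // Splitter tolerates stray whitespace and trailing commas in the config value.
        for (String cls : Splitter.on(",").omitEmptyStrings().trimResults().split(csClasses)) {
          hooks.add((Runnable) Class.forName(cls).newInstance());
        }
        return hooks;
      }
      public static void main(String[] args) throws Exception {
        // java.lang.Thread is used only because it is a no-arg Runnable on every JVM.
        for (Runnable hook : load(" java.lang.Thread , ")) {
          hook.run();
        }
      }
    }
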
-
-  // Check if the given entity is identified as dummy by Hive compilers.
-  private boolean isDummyEntity(Entity entity) {
-    return entity.isDummy();
-  }
-
-  // create hiveBinding with PrivilegeCache
-  private static HiveAuthzBinding getHiveBindingWithPrivilegeCache(HiveAuthzBinding hiveAuthzBinding,
-      String userName) throws SemanticException {
-    // get the original HiveAuthzBinding, and get the user's privileges by AuthorizationProvider
-    AuthorizationProvider authProvider = hiveAuthzBinding.getCurrentAuthProvider();
-    Set<String> userPrivileges =
-        authProvider.getPolicyEngine().getPrivileges(
-            authProvider.getGroupMapping().getGroups(userName), Sets.newHashSet(userName),
-            hiveAuthzBinding.getActiveRoleSet(), hiveAuthzBinding.getAuthServer());
-
-    // create PrivilegeCache using user's privileges
-    PrivilegeCache privilegeCache = new SimplePrivilegeCache(userPrivileges);
-    try {
-      // create new instance of HiveAuthzBinding whose backend provider should be SimpleCacheProviderBackend
-      return new HiveAuthzBinding(HiveAuthzBinding.HiveHook.HiveServer2, hiveAuthzBinding.getHiveConf(),
-              hiveAuthzBinding.getAuthzConf(), privilegeCache);
-    } catch (Exception e) {
-      LOG.error("Can not create HiveAuthzBinding with privilege cache.");
-      throw new SemanticException(e);
-    }
-  }
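
A hypothetical sketch of the caching idea behind the method above: fetch the user's privilege strings once, then answer later checks from the in-memory set instead of the policy engine. Sentry's SimplePrivilegeCache does more than this; the sketch only shows the shape of the optimization, and the privilege string below is illustrative.

    import java.util.HashSet;
    import java.util.Set;

    public class PrivilegeCacheSketch {
      private final Set<String> cached;

      PrivilegeCacheSketch(Set<String> userPrivileges) {
        this.cached = new HashSet<>(userPrivileges);  // snapshot taken once per statement
      }

      boolean contains(String privilege) {
        return cached.contains(privilege);            // no round trip to the Sentry service
      }

      public static void main(String[] args) {
        Set<String> privs = new HashSet<>();
        privs.add("server=server1->db=db1->table=t1->action=select"); // illustrative
        PrivilegeCacheSketch cache = new PrivilegeCacheSketch(privs);
        System.out.println(cache.contains("server=server1->db=db1->table=t1->action=select"));
      }
    }
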
-
-  private static boolean hasPrefixMatch(List<String> prefixList, final String str) {
-    for (String prefix : prefixList) {
-      if (str.startsWith(prefix)) {
-        return true;
-      }
-    }
-
-    return false;
-  }
-
-  /**
-   * Set the SerDe URI privileges. If the URI privileges are not set, serdeURI will be
-   * null and the URI authorization checks will be skipped.
-   */
-  protected void setSerdeURI(String serdeClassName) throws SemanticException {
-    if (!serdeURIPrivilegesEnabled) {
-      return;
-    }
-
-    // Whitelisted SerDe jars can be used by any user. The whitelist check is done by
-    // comparing the Java package name. The assumption is that the cluster admin will
-    // ensure there is no Java namespace collision: e.g. org.apache.hadoop.hive.serde2
-    // is used by Hive, and the cluster admin should ensure no custom SerDe class is
-    // introduced under the same namespace.
-    if (!hasPrefixMatch(serdeWhiteList, serdeClassName)) {
-      try {
-        CodeSource serdeSrc =
-            Class.forName(serdeClassName, true, Utilities.getSessionSpecifiedClassLoader())
-                .getProtectionDomain().getCodeSource();
-        if (serdeSrc == null) {
-          throw new SemanticException("Could not resolve the jar for Serde class " + serdeClassName);
-        }
-
-        String serdeJar = serdeSrc.getLocation().getPath();
-        if (serdeJar == null || serdeJar.isEmpty()) {
-          throw new SemanticException("Could not find the jar for Serde class " + serdeClassName
-              + "to validate privileges");
-        }
-
-        serdeURI = parseURI(serdeSrc.getLocation().toString(), true);
-      } catch (ClassNotFoundException e) {
-        throw new SemanticException("Error retrieving Serde class:" + e.getMessage(), e);
-      }
-    }
-  }
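
The jar lookup in setSerdeURI() uses standard Java: the jar that defines a class is reachable through its ProtectionDomain's CodeSource. A minimal sketch; the Guava class named below is illustrative (any jar-loaded class works), and JDK classes may return a null CodeSource.

    import java.security.CodeSource;

    public class SerdeJarSketch {
      public static void main(String[] args) throws ClassNotFoundException {
        CodeSource src = Class.forName("com.google.common.base.Splitter")
            .getProtectionDomain().getCodeSource();
        if (src == null) {
          System.out.println("Could not resolve the defining jar");
        } else {
          System.out.println(src.getLocation());  // e.g. file:/.../guava.jar
        }
      }
    }
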
-
-  protected HiveOperation getCurrentHiveStmtOp() {
-    SessionState sessState = SessionState.get();
-    if (sessState == null) {
-      // TODO: Warn
-      return null;
-    }
-    return sessState.getHiveOperation();
-  }
-
-  protected Subject getCurrentSubject(HiveSemanticAnalyzerHookContext context) {
-    // Extract the username from the hook context
-    return new Subject(context.getUserName());
-  }
-
-}