Posted to commits@sentry.apache.org by sp...@apache.org on 2018/06/27 16:47:41 UTC

[14/17] sentry git commit: SENTRY-2282: Remove hive-authzv2 binding and tests modules completely (Sergio Pena, reviewed by Na Li)

http://git-wip-us.apache.org/repos/asf/sentry/blob/e358fde7/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/MetastoreAuthzBindingBaseV2.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/MetastoreAuthzBindingBaseV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/MetastoreAuthzBindingBaseV2.java
deleted file mode 100644
index 88fa4a1..0000000
--- a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/MetastoreAuthzBindingBaseV2.java
+++ /dev/null
@@ -1,459 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.binding.metastore;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.util.Iterator;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Set;
-
-import javax.security.auth.login.LoginException;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStorePreEventListener;
-import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.events.PreAddPartitionEvent;
-import org.apache.hadoop.hive.metastore.events.PreAlterPartitionEvent;
-import org.apache.hadoop.hive.metastore.events.PreAlterTableEvent;
-import org.apache.hadoop.hive.metastore.events.PreCreateTableEvent;
-import org.apache.hadoop.hive.metastore.events.PreDropDatabaseEvent;
-import org.apache.hadoop.hive.metastore.events.PreDropPartitionEvent;
-import org.apache.hadoop.hive.metastore.events.PreDropTableEvent;
-import org.apache.hadoop.hive.metastore.events.PreEventContext;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.hadoop.hive.shims.Utils;
-import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
-import org.apache.sentry.core.common.utils.PathUtils;
-import org.apache.sentry.core.model.db.AccessURI;
-import org.apache.sentry.core.model.db.DBModelAuthorizable;
-import org.apache.sentry.core.model.db.Database;
-import org.apache.sentry.core.model.db.Server;
-import org.apache.sentry.core.model.db.Table;
-
-import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Sets;
-
-/**
- * Sentry binding for Hive Metastore. The binding is integrated into Metastore
- * via pre-event listeners, which are fired prior to executing the metadata
- * action. At this point we only authorize metadata writes, since the listeners
- * are not fired for read events. Each action builds an input and output
- * hierarchy as per the objects used in the given operations. This is then
- * passed down to the hive binding which handles the authorization. This ensures
- * that we follow the same privilege model and policies.
- */
-public abstract class MetastoreAuthzBindingBaseV2 extends MetaStorePreEventListener {
-
-  /**
-   * Build the set of object hierarchies, i.e. fully qualified db model objects.
-   */
-  protected static class HierarcyBuilder {
-    private List<List<DBModelAuthorizable>> authHierarchy;
-
-    public HierarcyBuilder() {
-      authHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-    }
-
-    public HierarcyBuilder addServerToOutput(Server server) {
-      List<DBModelAuthorizable> serverHierarchy = new ArrayList<DBModelAuthorizable>();
-      serverHierarchy.add(server);
-      authHierarchy.add(serverHierarchy);
-      return this;
-    }
-
-    public HierarcyBuilder addDbToOutput(Server server, String dbName) {
-      List<DBModelAuthorizable> dbHierarchy = new ArrayList<DBModelAuthorizable>();
-      addServerToOutput(server);
-      dbHierarchy.add(server);
-      dbHierarchy.add(new Database(dbName));
-      authHierarchy.add(dbHierarchy);
-      return this;
-    }
-
-    public HierarcyBuilder addUriToOutput(Server server, String uriPath,
-        String warehouseDirPath) throws MetaException {
-      List<DBModelAuthorizable> uriHierarchy = new ArrayList<DBModelAuthorizable>();
-      addServerToOutput(server);
-      uriHierarchy.add(server);
-      try {
-        uriHierarchy.add(new AccessURI(PathUtils.parseDFSURI(warehouseDirPath,
-            uriPath)));
-      } catch (URISyntaxException e) {
-        throw new MetaException("Error paring the URI " + e.getMessage());
-      }
-      authHierarchy.add(uriHierarchy);
-      return this;
-    }
-
-    public HierarcyBuilder addTableToOutput(Server server, String dbName,
-        String tableName) {
-      List<DBModelAuthorizable> tableHierarchy = new ArrayList<DBModelAuthorizable>();
-      addDbToOutput(server, dbName);
-      tableHierarchy.add(server);
-      tableHierarchy.add(new Database(dbName));
-      tableHierarchy.add(new Table(tableName));
-      authHierarchy.add(tableHierarchy);
-      return this;
-    }
-
-    public List<List<DBModelAuthorizable>> build() {
-      return authHierarchy;
-    }
-  }
-
-  private HiveAuthzConf authzConf;
-  private final Server authServer;
-  private final HiveConf hiveConf;
-  private final ImmutableSet<String> serviceUsers;
-  private HiveAuthzBinding hiveAuthzBinding;
-  private final String warehouseDir;
-  protected static boolean sentryCacheOutOfSync = false;
-
-  public MetastoreAuthzBindingBaseV2(Configuration config) throws Exception {
-    super(config);
-    String hiveAuthzConf = config.get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
-    if (hiveAuthzConf == null
-        || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
-      throw new IllegalArgumentException("Configuration key "
-          + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf
-          + "' is invalid.");
-    }
-    try {
-      authzConf = new HiveAuthzConf(new URL(hiveAuthzConf));
-    } catch (MalformedURLException e) {
-      throw new IllegalArgumentException("Configuration key "
-          + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " specifies a malformed URL '"
-          + hiveAuthzConf + "'", e);
-    }
-    hiveConf = new HiveConf(config, this.getClass());
-    this.authServer = new Server(authzConf.get(AuthzConfVars.AUTHZ_SERVER_NAME
-        .getVar()));
-    serviceUsers = ImmutableSet.copyOf(toTrimedLower(Sets.newHashSet(authzConf
-        .getStrings(AuthzConfVars.AUTHZ_METASTORE_SERVICE_USERS.getVar(),
-            new String[] { "" }))));
-    warehouseDir = hiveConf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE);
-
-  }
-
-  /**
-   * Main listener callback which is the entry point for Sentry
-   */
-  @Override
-  public void onEvent(PreEventContext context) throws MetaException,
-      NoSuchObjectException, InvalidOperationException {
-
-    if (!needsAuthorization(getUserName())) {
-      return;
-    }
-    switch (context.getEventType()) {
-    case CREATE_TABLE:
-      authorizeCreateTable((PreCreateTableEvent) context);
-      break;
-    case DROP_TABLE:
-      authorizeDropTable((PreDropTableEvent) context);
-      break;
-    case ALTER_TABLE:
-      authorizeAlterTable((PreAlterTableEvent) context);
-      break;
-    case ADD_PARTITION:
-      authorizeAddPartition((PreAddPartitionEvent) context);
-      break;
-    case DROP_PARTITION:
-      authorizeDropPartition((PreDropPartitionEvent) context);
-      break;
-    case ALTER_PARTITION:
-      authorizeAlterPartition((PreAlterPartitionEvent) context);
-      break;
-    case CREATE_DATABASE:
-      authorizeCreateDatabase();
-      break;
-    case DROP_DATABASE:
-      authorizeDropDatabase((PreDropDatabaseEvent) context);
-      break;
-    case LOAD_PARTITION_DONE:
-      // noop for now
-      break;
-    default:
-      break;
-    }
-  }
-
-  private void authorizeCreateDatabase()
-      throws InvalidOperationException, MetaException {
-    authorizeMetastoreAccess(HiveOperation.CREATEDATABASE,
-        new HierarcyBuilder().addServerToOutput(getAuthServer()).build(),
-        new HierarcyBuilder().addServerToOutput(getAuthServer()).build());
-  }
-
-  private void authorizeDropDatabase(PreDropDatabaseEvent context)
-      throws InvalidOperationException, MetaException {
-    authorizeMetastoreAccess(HiveOperation.DROPDATABASE,
-        new HierarcyBuilder().addDbToOutput(getAuthServer(),
-            context.getDatabase().getName()).build(),
-        new HierarcyBuilder().addDbToOutput(getAuthServer(),
-            context.getDatabase().getName()).build());
-  }
-
-  private void authorizeCreateTable(PreCreateTableEvent context)
-      throws InvalidOperationException, MetaException {
-    HierarcyBuilder inputBuilder = new HierarcyBuilder();
-    inputBuilder.addDbToOutput(getAuthServer(), context.getTable().getDbName());
-    HierarcyBuilder outputBuilder = new HierarcyBuilder();
-    outputBuilder.addDbToOutput(getAuthServer(), context.getTable().getDbName());
-
-    if (!StringUtils.isEmpty(context.getTable().getSd().getLocation())) {
-      String uriPath;
-      try {
-        uriPath = PathUtils.parseDFSURI(warehouseDir,
-            getSdLocation(context.getTable().getSd()));
-      } catch(URISyntaxException e) {
-        throw new MetaException(e.getMessage());
-      }
-      inputBuilder.addUriToOutput(getAuthServer(), uriPath, warehouseDir);
-    }
-    authorizeMetastoreAccess(HiveOperation.CREATETABLE, inputBuilder.build(),
-        outputBuilder.build());
-  }
-
-  private void authorizeDropTable(PreDropTableEvent context)
-      throws InvalidOperationException, MetaException {
-    authorizeMetastoreAccess(
-        HiveOperation.DROPTABLE,
-        new HierarcyBuilder().addTableToOutput(getAuthServer(),
-            context.getTable().getDbName(), context.getTable().getTableName())
-            .build(),
-        new HierarcyBuilder().addTableToOutput(getAuthServer(),
-            context.getTable().getDbName(), context.getTable().getTableName())
-            .build());
-  }
-
-  private void authorizeAlterTable(PreAlterTableEvent context)
-      throws InvalidOperationException, MetaException {
-    /*
-     * There are multiple alter table options and it's tricky to figure out which
-     * is attempted here. Currently all alter table operations need full table-level
-     * privilege, except for setting the location, which also needs a privilege on
-     * the URI. Hence we initially set the operation to ALTERTABLE_ADDCOLS; if the
-     * client has specified a location, we change it to ALTERTABLE_LOCATION.
-     */
-    HiveOperation operation = HiveOperation.ALTERTABLE_ADDCOLS;
-    HierarcyBuilder inputBuilder = new HierarcyBuilder();
-    inputBuilder.addTableToOutput(getAuthServer(), context.getOldTable()
-        .getDbName(), context.getOldTable().getTableName());
-    HierarcyBuilder outputBuilder = new HierarcyBuilder();
-    outputBuilder.addTableToOutput(getAuthServer(), context.getOldTable()
-        .getDbName(), context.getOldTable().getTableName());
-
-    // if the operation requires location change, then add URI privilege check
-    String oldLocationUri = null;
-    String newLocationUri = null;
-    try {
-      if (!StringUtils.isEmpty(context.getOldTable().getSd().getLocation())) {
-        oldLocationUri = PathUtils.parseDFSURI(warehouseDir,
-            getSdLocation(context.getOldTable().getSd()));
-      }
-      if (!StringUtils.isEmpty(context.getNewTable().getSd().getLocation())) {
-        newLocationUri = PathUtils.parseDFSURI(warehouseDir,
-            getSdLocation(context.getNewTable().getSd()));
-      }
-    } catch (URISyntaxException e) {
-      throw new MetaException(e.getMessage());
-    }
-    if (!StringUtils.equals(oldLocationUri, newLocationUri)) {
-      outputBuilder.addUriToOutput(getAuthServer(), newLocationUri,
-          warehouseDir);
-      operation = HiveOperation.ALTERTABLE_LOCATION;
-    }
-    authorizeMetastoreAccess(
-        operation,
-        inputBuilder.build(), outputBuilder.build());
-  }
-
-  private void authorizeAddPartition(PreAddPartitionEvent context)
-      throws InvalidOperationException, MetaException, NoSuchObjectException {
-    for (Partition mapiPart : context.getPartitions()) {
-      HierarcyBuilder inputBuilder = new HierarcyBuilder();
-      inputBuilder.addTableToOutput(getAuthServer(), mapiPart.getDbName(),
-          mapiPart.getTableName());
-      HierarcyBuilder outputBuilder = new HierarcyBuilder();
-      outputBuilder.addTableToOutput(getAuthServer(), mapiPart.getDbName(),
-          mapiPart.getTableName());
-      // check if we need to validate URI permissions when the storage location
-      // is non-default, i.e. something not under the parent table
-
-      String partitionLocation = null;
-      if (mapiPart.isSetSd()) {
-        partitionLocation = mapiPart.getSd().getLocation();
-      }
-      if (!StringUtils.isEmpty(partitionLocation)) {
-        String tableLocation = context
-            .getHandler()
-            .get_table(mapiPart.getDbName(),
-                mapiPart.getTableName()).getSd().getLocation();
-        String uriPath;
-        try {
-          uriPath = PathUtils.parseDFSURI(warehouseDir, mapiPart
-              .getSd().getLocation());
-        } catch (URISyntaxException e) {
-          throw new MetaException(e.getMessage());
-        }
-        if (!partitionLocation.equals(tableLocation) &&
-            !partitionLocation.startsWith(tableLocation + File.separator)) {
-          outputBuilder.addUriToOutput(getAuthServer(), uriPath, warehouseDir);
-        }
-      }
-      authorizeMetastoreAccess(HiveOperation.ALTERTABLE_ADDPARTS,
-          inputBuilder.build(), outputBuilder.build());
-    }
-  }
-
-  protected void authorizeDropPartition(PreDropPartitionEvent context)
-      throws InvalidOperationException, MetaException {
-    Iterator<Partition> partitionIterator = context.getPartitionIterator();
-    HierarcyBuilder inputHierarchy = new HierarcyBuilder();
-    HierarcyBuilder outputHierarchy = new HierarcyBuilder();
-    Partition partition;
-    while(partitionIterator.hasNext()) {
-      partition = partitionIterator.next();
-      inputHierarchy.addTableToOutput(getAuthServer(), partition.getDbName(),
-          partition.getTableName());
-      outputHierarchy.addTableToOutput(getAuthServer(), partition.getDbName(),
-          partition.getTableName());
-    }
-    authorizeMetastoreAccess(
-        HiveOperation.ALTERTABLE_DROPPARTS, inputHierarchy.build(), outputHierarchy.build());
-  }
-
-  private void authorizeAlterPartition(PreAlterPartitionEvent context)
-      throws InvalidOperationException, MetaException, NoSuchObjectException {
-    /*
-     * There are multiple alter partition options and it's tricky to figure out
-     * which is attempted here. Currently all alter partition operations need
-     * full table-level privilege, except for setting the location, which also
-     * needs a privilege on the URI. We don't try to distinguish the operation
-     * type; all alter partitions are treated as set-location.
-     */
-    HierarcyBuilder inputBuilder = new HierarcyBuilder().addTableToOutput(
-        getAuthServer(), context.getDbName(), context.getTableName());
-    HierarcyBuilder outputBuilder = new HierarcyBuilder().addTableToOutput(
-        getAuthServer(), context.getDbName(), context.getTableName());
-
-    Partition partition = context.getNewPartition();
-    String partitionLocation = getSdLocation(partition.getSd());
-    if (!StringUtils.isEmpty(partitionLocation)) {
-      String tableLocation = context.getHandler().get_table(
-          partition.getDbName(), partition.getTableName()).getSd().getLocation();
-
-      String uriPath;
-      try {
-        uriPath = PathUtils.parseDFSURI(warehouseDir, partitionLocation);
-      } catch (URISyntaxException e) {
-        throw new MetaException(e.getMessage());
-      }
-      if (!partitionLocation.startsWith(tableLocation + File.separator)) {
-        outputBuilder.addUriToOutput(getAuthServer(), uriPath, warehouseDir);
-      }
-    }
-    authorizeMetastoreAccess(
-        HiveOperation.ALTERPARTITION_LOCATION,
-        inputBuilder.build(), outputBuilder.build());
-  }
-
-  protected InvalidOperationException invalidOperationException(Exception e) {
-    InvalidOperationException ex = new InvalidOperationException(e.getMessage());
-    ex.initCause(e.getCause());
-    return ex;
-  }
-
-  /**
-   * Assemble the required and requested privileges, and validate them using
-   * the Hive binding's auth provider.
-   * @param hiveOp
-   * @param inputHierarchy
-   * @param outputHierarchy
-   * @throws InvalidOperationException
-   */
-  protected abstract void authorizeMetastoreAccess(HiveOperation hiveOp,
-      List<List<DBModelAuthorizable>> inputHierarchy,
-      List<List<DBModelAuthorizable>> outputHierarchy)
-      throws InvalidOperationException;
-
-  public Server getAuthServer() {
-    return authServer;
-  }
-
-  private boolean needsAuthorization(String userName) {
-    return !serviceUsers.contains(userName);
-  }
-
-  private static Set<String> toTrimedLower(Set<String> s) {
-    Set<String> result = Sets.newHashSet();
-    for (String v : s) {
-      result.add(v.trim().toLowerCase());
-    }
-    return result;
-  }
-
-  protected HiveAuthzBinding getHiveAuthzBinding() throws Exception {
-    if (hiveAuthzBinding == null) {
-      hiveAuthzBinding = new HiveAuthzBinding(HiveAuthzBinding.HiveHook.HiveMetaStore, hiveConf, authzConf);
-    }
-    return hiveAuthzBinding;
-  }
-
-  protected String getUserName() throws MetaException {
-    try {
-      return Utils.getUGI().getShortUserName();
-    } catch (LoginException e) {
-      throw new MetaException("Failed to get username " + e.getMessage());
-    } catch (IOException e) {
-      throw new MetaException("Failed to get username " + e.getMessage());
-    }
-  }
-
-  private String getSdLocation(StorageDescriptor sd) {
-    if (sd == null) {
-      return "";
-    } else {
-      return sd.getLocation();
-    }
-  }
-
-  public static boolean isSentryCacheOutOfSync() {
-    return sentryCacheOutOfSync;
-  }
-
-  public static void setSentryCacheOutOfSync(boolean sentryCacheOutOfSync) {
-    MetastoreAuthzBindingBaseV2.sentryCacheOutOfSync = sentryCacheOutOfSync;
-  }
-
-}
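
For context on the file removed above: HierarcyBuilder accumulates one authorizable chain per add*ToOutput call, and each call also appends its parents' chains (addTableToOutput invokes addDbToOutput, which invokes addServerToOutput), so a single table call yields three hierarchies. A hypothetical, self-contained sketch of the shape build() returns; the server/db/table names are made up and only the Sentry core model classes are assumed on the classpath:

import java.util.ArrayList;
import java.util.List;

import org.apache.sentry.core.model.db.DBModelAuthorizable;
import org.apache.sentry.core.model.db.Database;
import org.apache.sentry.core.model.db.Server;
import org.apache.sentry.core.model.db.Table;

// Hypothetical sketch: the List<List<DBModelAuthorizable>> that a single
// HierarcyBuilder.addTableToOutput(server, "db1", "t1") call builds up.
public class HierarchyShapeDemo {
  public static void main(String[] args) {
    Server server = new Server("server1");              // made-up name

    List<DBModelAuthorizable> serverChain = new ArrayList<>();
    serverChain.add(server);                            // [server]

    List<DBModelAuthorizable> dbChain = new ArrayList<>();
    dbChain.add(server);
    dbChain.add(new Database("db1"));                   // [server, db]

    List<DBModelAuthorizable> tableChain = new ArrayList<>();
    tableChain.add(server);
    tableChain.add(new Database("db1"));
    tableChain.add(new Table("t1"));                    // [server, db, table]

    List<List<DBModelAuthorizable>> hierarchy = new ArrayList<>();
    hierarchy.add(serverChain);
    hierarchy.add(dbChain);
    hierarchy.add(tableChain);

    System.out.println(hierarchy.size() + " hierarchies");  // prints: 3 hierarchies
  }
}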

http://git-wip-us.apache.org/repos/asf/sentry/blob/e358fde7/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/MetastoreAuthzBindingV2.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/MetastoreAuthzBindingV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/MetastoreAuthzBindingV2.java
deleted file mode 100644
index 107fe1f..0000000
--- a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/MetastoreAuthzBindingV2.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.binding.hive.v2.metastore;
-
-import java.io.IOException;
-import java.util.List;
-
-import javax.security.auth.login.LoginException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.events.PreDropPartitionEvent;
-import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
-import org.apache.sentry.binding.hive.v2.HiveAuthzPrivilegesMapV2;
-import org.apache.sentry.binding.metastore.MetastoreAuthzBindingBaseV2;
-import org.apache.sentry.core.common.Subject;
-import org.apache.sentry.core.common.exception.SentryUserException;
-import org.apache.sentry.core.model.db.DBModelAuthorizable;
-
-/**
- * Sentry binding for Hive Metastore. The binding is integrated into Metastore
- * via pre-event listeners, which are fired prior to executing the metadata
- * action. At this point we only authorize metadata writes, since the listeners
- * are not fired for read events. Each action builds an input and output
- * hierarchy as per the objects used in the given operations. This is then
- * passed down to the hive binding which handles the authorization. This ensures
- * that we follow the same privilege model and policies.
- */
-public class MetastoreAuthzBindingV2 extends MetastoreAuthzBindingBaseV2 {
-
-  public MetastoreAuthzBindingV2(Configuration config) throws Exception {
-    super(config);
-  }
-
-  @Override
-  protected void authorizeDropPartition(PreDropPartitionEvent context)
-      throws InvalidOperationException, MetaException {
-    authorizeMetastoreAccess(
-        HiveOperation.ALTERTABLE_DROPPARTS,
-        new HierarcyBuilder().addTableToOutput(getAuthServer(),
-            context.getTable().getDbName(),
-            context.getTable().getTableName()).build(),
-        new HierarcyBuilder().addTableToOutput(getAuthServer(),
-            context.getTable().getDbName(),
-            context.getTable().getTableName()).build());
-  }
-
-  /**
-   * Assemble the required and requested privileges, and validate them using
-   * the Hive binding's auth provider.
-   * @param hiveOp
-   * @param inputHierarchy
-   * @param outputHierarchy
-   * @throws InvalidOperationException
-   */
-  @Override
-  protected void authorizeMetastoreAccess(HiveOperation hiveOp,
-      List<List<DBModelAuthorizable>> inputHierarchy,
-      List<List<DBModelAuthorizable>> outputHierarchy)
-      throws InvalidOperationException {
-    if (isSentryCacheOutOfSync()) {
-      throw invalidOperationException(new SentryUserException(
-          "Metastore/Sentry cache is out of sync"));
-    }
-    try {
-      HiveAuthzBinding hiveAuthzBinding = getHiveAuthzBinding();
-      hiveAuthzBinding.authorize(hiveOp, HiveAuthzPrivilegesMapV2
-          .getHiveAuthzPrivileges(hiveOp), new Subject(getUserName()),
-          inputHierarchy, outputHierarchy);
-    } catch (AuthorizationException e1) {
-      throw invalidOperationException(e1);
-    } catch (LoginException e1) {
-      throw invalidOperationException(e1);
-    } catch (IOException e1) {
-      throw invalidOperationException(e1);
-    } catch (Exception e) {
-      throw invalidOperationException(e);
-    }
-
-  }
-}
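
Deployments that used the removed binding wired it into the metastore through hive-site.xml. A hypothetical sketch (hive.metastore.pre.event.listeners is the standard Hive key; hive.sentry.conf.url is the key referenced as HiveAuthzConf.HIVE_SENTRY_CONF_URL above, and the sentry-site.xml path is made up):

<!-- Hypothetical hive-site.xml sketch for the removed v2 metastore binding -->
<property>
  <name>hive.metastore.pre.event.listeners</name>
  <value>org.apache.sentry.binding.hive.v2.metastore.MetastoreAuthzBindingV2</value>
</property>
<property>
  <name>hive.sentry.conf.url</name>
  <value>file:///etc/hive/conf/sentry-site.xml</value>
</property>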

http://git-wip-us.apache.org/repos/asf/sentry/blob/e358fde7/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryHiveMetaStoreClientV2.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryHiveMetaStoreClientV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryHiveMetaStoreClientV2.java
deleted file mode 100644
index 1d7a4eb..0000000
--- a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryHiveMetaStoreClientV2.java
+++ /dev/null
@@ -1,161 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.binding.metastore;
-
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.List;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaHookLoader;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.IMetaStoreClient;
-import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.UnknownDBException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.sentry.binding.hive.HiveAuthzBindingHookBaseV2;
-import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
-import org.apache.thrift.TException;
-
-public class SentryHiveMetaStoreClientV2 extends HiveMetaStoreClient implements
-    IMetaStoreClient {
-
-  private HiveAuthzBinding hiveAuthzBinding;
-  private HiveAuthzConf authzConf;
-
-  public SentryHiveMetaStoreClientV2(HiveConf conf) throws MetaException {
-    super(conf);
-  }
-
-  public SentryHiveMetaStoreClientV2(HiveConf conf, HiveMetaHookLoader hookLoader)
-      throws MetaException {
-    super(conf, hookLoader);
-  }
-
-  @Override
-  public List<String> getDatabases(String databasePattern) throws MetaException {
-    return filterDatabases(super.getDatabases(databasePattern));
-  }
-
-  @Override
-  public List<String> getAllDatabases() throws MetaException {
-    return filterDatabases(super.getAllDatabases());
-  }
-
-  @Override
-  public List<String> getTables(String dbName, String tablePattern)
-      throws MetaException {
-    return filterTables(dbName, super.getTables(dbName, tablePattern));
-  }
-
-  @Override
-  public List<String> getAllTables(String dbName) throws MetaException {
-    return filterTables(dbName, super.getAllTables(dbName));
-  }
-
-  @Override
-  public List<String> listTableNamesByFilter(String dbName, String filter,
-      short maxTables) throws InvalidOperationException, UnknownDBException,
-      TException {
-    return filterTables(dbName,
-        super.listTableNamesByFilter(dbName, filter, maxTables));
-  }
-
-  /**
-   * Invoke Hive database filtering that removes the entries which the user
-   * has no privileges to access
-   *
-   * @param dbList
-   * @return
-   * @throws MetaException
-   */
-  private List<String> filterDatabases(List<String> dbList)
-      throws MetaException {
-    try {
-      return HiveAuthzBindingHookBaseV2.filterShowDatabases(getHiveAuthzBinding(),
-          dbList, HiveOperation.SHOWDATABASES, getUserName());
-    } catch (SemanticException e) {
-      throw new MetaException("Error getting DB list " + e.getMessage());
-    }
-  }
-
-  /**
-   * Invoke Hive table filtering that removes the entries which the user has
-   * no privileges to access
-   *
-   * @param dbName
-   * @param tabList
-   * @return
-   * @throws MetaException
-   */
-  private List<String> filterTables(String dbName, List<String> tabList)
-      throws MetaException {
-    try {
-      return HiveAuthzBindingHookBaseV2.filterShowTables(getHiveAuthzBinding(),
-          tabList, HiveOperation.SHOWTABLES, getUserName(), dbName);
-    } catch (SemanticException e) {
-      throw new MetaException("Error getting Table list " + e.getMessage());
-    }
-  }
-
-  private String getUserName() {
-    return getConf().get(HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME);
-  }
-
-  /**
-   * load Hive auth provider
-   *
-   * @return
-   * @throws MetaException
-   */
-  private HiveAuthzBinding getHiveAuthzBinding() throws MetaException {
-    if (hiveAuthzBinding == null) {
-      String hiveAuthzConf = getConf().get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
-      if (hiveAuthzConf == null
-          || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
-        throw new MetaException("Configuration key "
-            + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf
-            + "' is invalid.");
-      }
-      try {
-        authzConf = new HiveAuthzConf(new URL(hiveAuthzConf));
-      } catch (MalformedURLException e) {
-        throw new MetaException("Configuration key "
-            + HiveAuthzConf.HIVE_SENTRY_CONF_URL
-            + " specifies a malformed URL '" + hiveAuthzConf + "' "
-            + e.getMessage());
-      }
-      try {
-        hiveAuthzBinding = new HiveAuthzBinding(
-            HiveAuthzBinding.HiveHook.HiveMetaStore, getConf(), authzConf);
-      } catch (Exception e) {
-        throw new MetaException("Failed to load Hive binding " + e.getMessage());
-      }
-    }
-    return hiveAuthzBinding;
-  }
-
-  private HiveConf getConf() {
-    return SessionState.get().getConf();
-  }
-
-}
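
The removed client above is a delegate-then-filter decorator: each listing call defers to HiveMetaStoreClient, then strips entries the requesting user cannot see. A hypothetical sketch of the same pattern; allowed() is a placeholder standing in for the HiveAuthzBindingHookBaseV2.filterShowTables call:

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;

// Hypothetical delegate-then-filter sketch; allowed() is a stand-in for the
// real Sentry check performed by the removed client above.
public class FilteringMetaStoreClient extends HiveMetaStoreClient {

  public FilteringMetaStoreClient(HiveConf conf) throws MetaException {
    super(conf);
  }

  @Override
  public List<String> getAllTables(String dbName) throws MetaException {
    List<String> visible = new ArrayList<>();
    for (String table : super.getAllTables(dbName)) {   // delegate the fetch
      if (allowed(dbName, table)) {                     // then filter results
        visible.add(table);
      }
    }
    return visible;
  }

  private boolean allowed(String dbName, String table) {
    return true;  // placeholder: the real client consults Sentry policy here
  }
}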

http://git-wip-us.apache.org/repos/asf/sentry/blob/e358fde7/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryMetaStoreFilterHook.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryMetaStoreFilterHook.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryMetaStoreFilterHook.java
deleted file mode 100644
index 86bf7aa..0000000
--- a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryMetaStoreFilterHook.java
+++ /dev/null
@@ -1,201 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.binding.metastore;
-
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreFilterHook;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.Index;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.PartitionSpec;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.sentry.binding.hive.HiveAuthzBindingHookBaseV2;
-import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
-
-public class SentryMetaStoreFilterHook implements MetaStoreFilterHook {
-
-  static final protected Log LOG = LogFactory.getLog(SentryMetaStoreFilterHook.class);
-
-  private HiveAuthzBinding hiveAuthzBinding;
-  private HiveAuthzConf authzConf;
-
-  public SentryMetaStoreFilterHook(HiveConf hiveConf) { //NOPMD
-  }
-
-  @Override
-  public List<String> filterDatabases(List<String> dbList) {
-    return filterDb(dbList);
-  }
-
-  @Override
-  public Database filterDatabase(Database dataBase)
-      throws NoSuchObjectException {
-    return dataBase;
-  }
-
-  @Override
-  public List<String> filterTableNames(String dbName, List<String> tableList) {
-    return filterTab(dbName, tableList);
-  }
-
-  @Override
-  public Table filterTable(Table table) throws NoSuchObjectException {
-    return table;
-  }
-
-  @Override
-  public List<Table> filterTables(List<Table> tableList) {
-    return tableList;
-  }
-
-  @Override
-  public List<Partition> filterPartitions(List<Partition> partitionList) {
-    return partitionList;
-  }
-
-  @Override
-  public List<PartitionSpec> filterPartitionSpecs(
-      List<PartitionSpec> partitionSpecList) {
-    return partitionSpecList;
-  }
-
-  @Override
-  public Partition filterPartition(Partition partition)
-      throws NoSuchObjectException {
-    return partition;
-  }
-
-  @Override
-  public List<String> filterPartitionNames(String dbName, String tblName,
-      List<String> partitionNames) {
-    return partitionNames;
-  }
-
-  @Override
-  public Index filterIndex(Index index) throws NoSuchObjectException {
-    return index;
-  }
-
-  @Override
-  public List<String> filterIndexNames(String dbName, String tblName,
-      List<String> indexList) {
-    return indexList;
-  }
-
-  @Override
-  public List<Index> filterIndexes(List<Index> indexeList) {
-    return indexeList;
-  }
-
-  /**
-   * Invoke Hive database filtering that removes the entries which the user
-   * has no privileges to access
-   * @param dbList
-   * @return
-   * @throws MetaException
-   */
-  private List<String> filterDb(List<String> dbList) {
-    try {
-      return HiveAuthzBindingHookBaseV2.filterShowDatabases(getHiveAuthzBinding(),
-          dbList, HiveOperation.SHOWDATABASES, getUserName());
-    } catch (Exception e) {
-      LOG.warn("Error getting DB list ", e);
-      return new ArrayList<String>();
-    } finally {
-      close();
-    }
-  }
-
-  /**
-   * Invoke Hive table filtering that removes the entries which the user has
-   * no privileges to access
-   * @param dbName
-   * @param tabList
-   * @return
-   * @throws MetaException
-   */
-  private List<String> filterTab(String dbName, List<String> tabList) {
-    try {
-      return HiveAuthzBindingHookBaseV2.filterShowTables(getHiveAuthzBinding(),
-          tabList, HiveOperation.SHOWTABLES, getUserName(), dbName);
-    } catch (Exception e) {
-      LOG.warn("Error getting Table list ", e);
-      return new ArrayList<String>();
-    } finally {
-      close();
-    }
-  }
-
-  private String getUserName() {
-    return getConf().get(HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME);
-  }
-
-  /**
-   * load Hive auth provider
-   * @return
-   * @throws MetaException
-   */
-  private HiveAuthzBinding getHiveAuthzBinding() throws MetaException {
-    if (hiveAuthzBinding == null) {
-      String hiveAuthzConf = getConf().get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
-      if (hiveAuthzConf == null
-          || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
-        throw new MetaException("Configuration key "
-            + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf
-            + "' is invalid.");
-      }
-      try {
-        authzConf = new HiveAuthzConf(new URL(hiveAuthzConf));
-      } catch (MalformedURLException e) {
-        throw new MetaException("Configuration key "
-            + HiveAuthzConf.HIVE_SENTRY_CONF_URL
-            + " specifies a malformed URL '" + hiveAuthzConf + "' "
-            + e.getMessage());
-      }
-      try {
-        hiveAuthzBinding = new HiveAuthzBinding(
-            HiveAuthzBinding.HiveHook.HiveMetaStore, getConf(), authzConf);
-      } catch (Exception e) {
-        throw new MetaException("Failed to load Hive binding " + e.getMessage());
-      }
-    }
-    return hiveAuthzBinding;
-  }
-
-  private HiveConf getConf() {
-    return SessionState.get().getConf();
-  }
-
-  private void close() {
-    if (hiveAuthzBinding != null) {
-      hiveAuthzBinding.close();
-      hiveAuthzBinding = null;
-    }
-  }
-}
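
Two deliberate choices in the removed filter hook are worth noting: it fails closed (any error while filtering returns an empty list rather than the unfiltered one), and it closes the HiveAuthzBinding in a finally block so a fresh binding is built lazily on the next call. A hypothetical stand-alone sketch of that shape; checkAll() and close() are placeholders:

import java.util.ArrayList;
import java.util.List;

// Hypothetical fail-closed filtering sketch; checkAll() and close() are
// placeholders for the Sentry policy check and the authz binding teardown.
public class FailClosedFilter {

  public List<String> filter(List<String> names) {
    try {
      return checkAll(names);
    } catch (Exception e) {
      return new ArrayList<String>();  // fail closed: leak no names on error
    } finally {
      close();                         // binding is rebuilt on the next call
    }
  }

  private List<String> checkAll(List<String> names) throws Exception {
    return names;  // placeholder for the real per-name authorization check
  }

  private void close() {
    // placeholder: the real hook closes and nulls its HiveAuthzBinding here
  }
}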

http://git-wip-us.apache.org/repos/asf/sentry/blob/e358fde7/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryMetastorePostEventListenerBaseV2.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryMetastorePostEventListenerBaseV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryMetastorePostEventListenerBaseV2.java
deleted file mode 100644
index 642e873..0000000
--- a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryMetastorePostEventListenerBaseV2.java
+++ /dev/null
@@ -1,416 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.binding.metastore;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
-import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
-import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
-import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent;
-import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
-import org.apache.hadoop.hive.metastore.events.DropDatabaseEvent;
-import org.apache.hadoop.hive.metastore.events.DropPartitionEvent;
-import org.apache.hadoop.hive.metastore.events.DropTableEvent;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.sentry.core.common.exception.SentryUserException;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
-import org.apache.sentry.core.common.Authorizable;
-import org.apache.sentry.core.model.db.Database;
-import org.apache.sentry.core.model.db.Server;
-import org.apache.sentry.core.model.db.Table;
-import org.apache.sentry.provider.db.SentryMetastoreListenerPlugin;
-import org.apache.sentry.api.service.thrift.SentryPolicyServiceClient;
-import org.apache.sentry.service.thrift.SentryServiceClientFactory;
-import org.apache.sentry.service.thrift.ServiceConstants.ConfUtilties;
-import org.apache.sentry.service.thrift.ServiceConstants.ServerConfig;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class SentryMetastorePostEventListenerBaseV2 extends MetaStoreEventListener {
-
-  private static final Logger LOGGER = LoggerFactory.getLogger(SentryMetastoreListenerPlugin.class);
-  private final HiveAuthzConf authzConf;
-  private final Server server;
-
-  protected List<SentryMetastoreListenerPlugin> sentryPlugins = new ArrayList<SentryMetastoreListenerPlugin>();
-
-  public SentryMetastorePostEventListenerBaseV2(Configuration config) {
-    super(config);
-
-    if (!(config instanceof HiveConf)) {
-        String error = "Could not initialize Plugin - Configuration is not an instance of HiveConf";
-        LOGGER.error(error);
-        throw new RuntimeException(error);
-    }
-
-    authzConf = HiveAuthzConf.getAuthzConf((HiveConf)config);
-    server = new Server(authzConf.get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar()));
-    Iterable<String> pluginClasses = ConfUtilties.CLASS_SPLITTER
-        .split(config.get(ServerConfig.SENTRY_METASTORE_PLUGINS,
-            ServerConfig.SENTRY_METASTORE_PLUGINS_DEFAULT).trim());
-
-    try {
-      for (String pluginClassStr : pluginClasses) {
-        Class<?> clazz = config.getClassByName(pluginClassStr);
-        if (!SentryMetastoreListenerPlugin.class.isAssignableFrom(clazz)) {
-          throw new IllegalArgumentException("Class ["
-              + pluginClassStr + "] is not a "
-              + SentryMetastoreListenerPlugin.class.getName());
-        }
-        SentryMetastoreListenerPlugin plugin = (SentryMetastoreListenerPlugin) clazz
-            .getConstructor(Configuration.class, Configuration.class)
-            .newInstance(config, authzConf);
-        sentryPlugins.add(plugin);
-      }
-    } catch (Exception e) {
-      LOGGER.error("Could not initialize Plugin !!", e);
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Override
-  public void onCreateTable (CreateTableEvent tableEvent) throws MetaException {
-
-    // don't sync paths/privileges if the operation has failed
-    if (!tableEvent.getStatus()) {
-      LOGGER.debug("Skip sync paths/privileges with Sentry server for onCreateTable event," +
-        " since the operation failed. \n");
-      return;
-    }
-
-    if (tableEvent.getTable().getSd().getLocation() != null) {
-      String authzObj = tableEvent.getTable().getDbName() + "."
-          + tableEvent.getTable().getTableName();
-      String path = tableEvent.getTable().getSd().getLocation();
-      for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
-        plugin.addPath(authzObj, path);
-      }
-    }
-
-    // drop the privileges on the given table, in case anything was left
-    // behind during the drop
-    if (!syncWithPolicyStore(AuthzConfVars.AUTHZ_SYNC_CREATE_WITH_POLICY_STORE)) {
-      return;
-    }
-
-    dropSentryTablePrivilege(tableEvent.getTable().getDbName(),
-        tableEvent.getTable().getTableName());
-  }
-
-  @Override
-  public void onDropTable(DropTableEvent tableEvent) throws MetaException {
-
-    // don't sync paths/privileges if the operation has failed
-    if (!tableEvent.getStatus()) {
-      LOGGER.debug("Skip syncing paths/privileges with Sentry server for onDropTable event," +
-        " since the operation failed. \n");
-      return;
-    }
-
-    if (tableEvent.getTable().getSd().getLocation() != null) {
-      String authzObj = tableEvent.getTable().getDbName() + "."
-          + tableEvent.getTable().getTableName();
-      for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
-        plugin.removeAllPaths(authzObj, null);
-      }
-    }
-    // drop the privileges on the given table
-    if (!syncWithPolicyStore(AuthzConfVars.AUTHZ_SYNC_DROP_WITH_POLICY_STORE)) {
-      return;
-    }
-
-    if (!tableEvent.getStatus()) {
-      return;
-    }
-
-    dropSentryTablePrivilege(tableEvent.getTable().getDbName(),
-        tableEvent.getTable().getTableName());
-  }
-
-  @Override
-  public void onCreateDatabase(CreateDatabaseEvent dbEvent)
-      throws MetaException {
-
-    // don't sync paths/privileges if the operation has failed
-    if (!dbEvent.getStatus()) {
-      LOGGER.debug("Skip syncing paths/privileges with Sentry server for onCreateDatabase event," +
-        " since the operation failed. \n");
-      return;
-    }
-
-    if (dbEvent.getDatabase().getLocationUri() != null) {
-      String authzObj = dbEvent.getDatabase().getName();
-      String path = dbEvent.getDatabase().getLocationUri();
-      for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
-        plugin.addPath(authzObj, path);
-      }
-    }
-    // drop the privileges on the database, in case anything left behind during
-    // last drop db
-    if (!syncWithPolicyStore(AuthzConfVars.AUTHZ_SYNC_CREATE_WITH_POLICY_STORE)) {
-      return;
-    }
-
-    dropSentryDbPrivileges(dbEvent.getDatabase().getName());
-  }
-
-  /**
-   * Drop the privileges on the database. Note that child tables will be
-   * dropped individually by the client, so we just need to handle removing
-   * the db privileges. The table drop should clean up the table privileges.
-   */
-  @Override
-  public void onDropDatabase(DropDatabaseEvent dbEvent) throws MetaException {
-
-    // don't sync paths/privileges if the operation has failed
-    if (!dbEvent.getStatus()) {
-      LOGGER.debug("Skip syncing paths/privileges with Sentry server for onDropDatabase event," +
-        " since the operation failed. \n");
-      return;
-    }
-
-    String authzObj = dbEvent.getDatabase().getName();
-    for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
-      List<String> tNames = dbEvent.getHandler().get_all_tables(authzObj);
-      plugin.removeAllPaths(authzObj, tNames);
-    }
-    if (!syncWithPolicyStore(AuthzConfVars.AUTHZ_SYNC_DROP_WITH_POLICY_STORE)) {
-      return;
-    }
-
-    dropSentryDbPrivileges(dbEvent.getDatabase().getName());
-  }
-
-  /**
-   * Adjust the privileges when table is renamed
-   */
-  @Override
-  public void onAlterTable (AlterTableEvent tableEvent) throws MetaException {
-
-    // don't sync privileges if the operation has failed
-    if (!tableEvent.getStatus()) {
-      LOGGER.debug("Skip syncing privileges with Sentry server for onAlterTable event," +
-        " since the operation failed. \n");
-      return;
-    }
-    String oldLoc = null, newLoc = null;
-    org.apache.hadoop.hive.metastore.api.Table oldTal = tableEvent.getOldTable();
-    org.apache.hadoop.hive.metastore.api.Table newTal = tableEvent.getNewTable();
-    if (oldTal != null && oldTal.getSd() != null) {
-      oldLoc = oldTal.getSd().getLocation();
-    }
-    if (newTal != null && newTal.getSd() != null) {
-      newLoc = newTal.getSd().getLocation();
-    }
-    if (oldLoc != null && newLoc != null && !oldLoc.equals(newLoc)) {
-      String oldDbName = tableEvent.getOldTable().getDbName();
-      String oldTbName = tableEvent.getOldTable().getTableName();
-      String newTbName = tableEvent.getNewTable().getTableName();
-      String newDbName = tableEvent.getNewTable().getDbName();
-      renameSentryTablePrivilege(oldDbName, oldTbName, oldLoc, newDbName, newTbName, newLoc);
-    }
-  }
-
-  @Override
-  public void onAlterPartition(AlterPartitionEvent partitionEvent)
-      throws MetaException {
-
-    // don't sync privileges if the operation has failed
-    if (!partitionEvent.getStatus()) {
-      LOGGER.debug("Skip syncing privileges with Sentry server for onAlterPartition event," +
-        " since the operation failed. \n");
-      return;
-    }
-
-    String oldLoc = null, newLoc = null;
-    if (partitionEvent.getOldPartition() != null) {
-      oldLoc = partitionEvent.getOldPartition().getSd().getLocation();
-    }
-    if (partitionEvent.getNewPartition() != null) {
-      newLoc = partitionEvent.getNewPartition().getSd().getLocation();
-    }
-
-    if (oldLoc != null && newLoc != null && !oldLoc.equals(newLoc)) {
-      String authzObj =
-          partitionEvent.getOldPartition().getDbName() + "."
-              + partitionEvent.getOldPartition().getTableName();
-      for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
-        plugin.renameAuthzObject(authzObj, oldLoc,
-            authzObj, newLoc);
-      }
-    }
-  }
-
-  @Override
-  public void onAddPartition(AddPartitionEvent partitionEvent)
-      throws MetaException {
-
-    // don't sync path if the operation has failed
-    if (!partitionEvent.getStatus()) {
-      LOGGER.debug("Skip syncing path with Sentry server for onAddPartition event," +
-        " since the operation failed. \n");
-      return;
-    }
-
-    Iterator<Partition> partitionIterator = partitionEvent.getPartitionIterator();
-    while(partitionIterator.hasNext()) {
-      Partition part = partitionIterator.next();
-      if (part.getSd() != null && part.getSd().getLocation() != null) {
-        String authzObj = part.getDbName() + "." + part.getTableName();
-        String path = part.getSd().getLocation();
-        for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
-          plugin.addPath(authzObj, path);
-        }
-      }
-    }
-    super.onAddPartition(partitionEvent);
-  }
-
-  @Override
-  public void onDropPartition(DropPartitionEvent partitionEvent)
-      throws MetaException {
-
-    // don't sync path if the operation has failed
-    if (!partitionEvent.getStatus()) {
-      LOGGER.debug("Skip syncing path with Sentry server for onDropPartition event," +
-        " since the operation failed. \n");
-      return;
-    }
-
-    String authzObj = partitionEvent.getTable().getDbName() + "."
-        + partitionEvent.getTable().getTableName();
-    Iterator<Partition> partitionIterator = partitionEvent.getPartitionIterator();
-    while (partitionIterator.hasNext()) {
-      Partition part = partitionIterator.next();
-      String path = part.getSd().getLocation();
-      for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
-        plugin.removePath(authzObj, path);
-      }
-    }
-    super.onDropPartition(partitionEvent);
-  }
-
-  private SentryPolicyServiceClient getSentryServiceClient()
-      throws MetaException {
-    try {
-      return SentryServiceClientFactory.create(authzConf);
-    } catch (Exception e) {
-      throw new MetaException("Failed to connect to Sentry service "
-          + e.getMessage());
-    }
-  }
-
-  private void dropSentryDbPrivileges(String dbName) throws MetaException {
-    List<Authorizable> authorizableTable = new ArrayList<Authorizable>();
-    authorizableTable.add(server);
-    authorizableTable.add(new Database(dbName));
-    try {
-      dropSentryPrivileges(authorizableTable);
-    } catch (SentryUserException e) {
-      throw new MetaException("Failed to remove Sentry policies for drop DB "
-          + dbName + " Error: " + e.getMessage());
-    } catch (IOException e) {
-      throw new MetaException("Failed to find local user " + e.getMessage());
-    }
-
-  }
-
-  private void dropSentryTablePrivilege(String dbName, String tabName)
-      throws MetaException {
-    List<Authorizable> authorizableTable = new ArrayList<Authorizable>();
-    authorizableTable.add(server);
-    authorizableTable.add(new Database(dbName));
-    authorizableTable.add(new Table(tabName));
-
-    try {
-      dropSentryPrivileges(authorizableTable);
-    } catch (SentryUserException e) {
-      throw new MetaException(
-          "Failed to remove Sentry policies for drop table " + dbName + "."
-              + tabName + " Error: " + e.getMessage());
-    } catch (IOException e) {
-      throw new MetaException("Failed to find local user " + e.getMessage());
-    }
-
-  }
-  private void dropSentryPrivileges(
-      List<? extends Authorizable> authorizableTable)
-      throws SentryUserException, IOException, MetaException {
-    String requestorUserName = UserGroupInformation.getCurrentUser()
-        .getShortUserName();
-    try(SentryPolicyServiceClient sentryClient = getSentryServiceClient()) {
-      sentryClient.dropPrivileges(requestorUserName, authorizableTable);
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
-  }
-
-  private void renameSentryTablePrivilege(String oldDbName, String oldTabName,
-      String oldPath, String newDbName, String newTabName, String newPath)
-      throws MetaException {
-    List<Authorizable> oldAuthorizableTable = new ArrayList<Authorizable>();
-    oldAuthorizableTable.add(server);
-    oldAuthorizableTable.add(new Database(oldDbName));
-    oldAuthorizableTable.add(new Table(oldTabName));
-
-    List<Authorizable> newAuthorizableTable = new ArrayList<Authorizable>();
-    newAuthorizableTable.add(server);
-    newAuthorizableTable.add(new Database(newDbName));
-    newAuthorizableTable.add(new Table(newTabName));
-
-    if (!oldTabName.equalsIgnoreCase(newTabName)
-        && syncWithPolicyStore(AuthzConfVars.AUTHZ_SYNC_ALTER_WITH_POLICY_STORE)) {
-
-      try (SentryPolicyServiceClient sentryClient = getSentryServiceClient()){
-        String requestorUserName = UserGroupInformation.getCurrentUser()
-            .getShortUserName();
-        sentryClient.renamePrivileges(requestorUserName, oldAuthorizableTable, newAuthorizableTable);
-      } catch (SentryUserException e) {
-        throw new MetaException(
-            "Failed to remove Sentry policies for rename table " + oldDbName
-            + "." + oldTabName + "to " + newDbName + "." + newTabName
-            + " Error: " + e.getMessage());
-      } catch (IOException e) {
-        throw new MetaException("Failed to find local user " + e.getMessage());
-      } catch (Exception e) {
-        e.printStackTrace();
-      }
-    }
-    // The HDFS plugin needs to know if it's a path change (set location)
-    for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
-      plugin.renameAuthzObject(oldDbName + "." + oldTabName, oldPath,
-          newDbName + "." + newTabName, newPath);
-    }
-  }
-
-  private boolean syncWithPolicyStore(AuthzConfVars syncConfVar) {
-    return "true"
-        .equalsIgnoreCase(authzConf.get(syncConfVar.getVar(), "true"));
-  }
-
-}
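
The removed listener loads each class named by ServerConfig.SENTRY_METASTORE_PLUGINS reflectively, expects a (Configuration, Configuration) constructor, and forwards path changes to every plugin. A hypothetical logging plugin sketch, assuming SentryMetastoreListenerPlugin declares exactly the four callbacks invoked in the diff above:

import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.sentry.provider.db.SentryMetastoreListenerPlugin;

// Hypothetical plugin that just logs the callbacks the removed listener
// forwards; the (Configuration, Configuration) constructor matches how the
// listener instantiates plugins reflectively.
public class LoggingListenerPlugin extends SentryMetastoreListenerPlugin {

  public LoggingListenerPlugin(Configuration conf, Configuration authzConf) {
    // a real plugin would keep the configs to reach Sentry/HDFS
  }

  @Override
  public void addPath(String authzObj, String path) {
    System.out.println("addPath " + authzObj + " -> " + path);
  }

  @Override
  public void removePath(String authzObj, String path) {
    System.out.println("removePath " + authzObj + " -> " + path);
  }

  @Override
  public void removeAllPaths(String authzObj, List<String> childObjects) {
    System.out.println("removeAllPaths " + authzObj);
  }

  @Override
  public void renameAuthzObject(String oldName, String oldPath,
      String newName, String newPath) {
    System.out.println("rename " + oldName + " -> " + newName);
  }
}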

http://git-wip-us.apache.org/repos/asf/sentry/blob/e358fde7/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryMetastorePostEventListenerV2.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryMetastorePostEventListenerV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryMetastorePostEventListenerV2.java
deleted file mode 100644
index ad649c3..0000000
--- a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryMetastorePostEventListenerV2.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.binding.hive.v2.metastore;
-
-import java.util.Iterator;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
-import org.apache.hadoop.hive.metastore.events.DropPartitionEvent;
-import org.apache.sentry.binding.metastore.SentryMetastorePostEventListenerBaseV2;
-import org.apache.sentry.provider.db.SentryMetastoreListenerPlugin;
-
-public class SentryMetastorePostEventListenerV2 extends SentryMetastorePostEventListenerBaseV2 {
-
-  public SentryMetastorePostEventListenerV2(Configuration config) {
-    super(config);
-  }
-
-  @Override
-  public void onAddPartition(AddPartitionEvent partitionEvent)
-      throws MetaException {
-    if (partitionEvent != null && partitionEvent.getPartitionIterator() != null) {
-      Iterator<Partition> it = partitionEvent.getPartitionIterator();
-      while (it.hasNext()) {
-        Partition part = it.next();
-        if (part.getSd() != null && part.getSd().getLocation() != null) {
-          String authzObj = part.getDbName() + "." + part.getTableName();
-          String path = part.getSd().getLocation();
-          for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
-            plugin.addPath(authzObj, path);
-          }
-        }
-      }
-    }
-  }
-
-  @Override
-  public void onDropPartition(DropPartitionEvent partitionEvent)
-      throws MetaException {
-    if (partitionEvent != null && partitionEvent.getPartitionIterator() != null) {
-      String authzObj = partitionEvent.getTable().getDbName() + "."
-          + partitionEvent.getTable().getTableName();
-      Iterator<Partition> it = partitionEvent.getPartitionIterator();
-      while (it.hasNext()) {
-        Partition part = it.next();
-        if (part.getSd() != null && part.getSd().getLocation() != null) {
-          String path = part.getSd().getLocation();
-          for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
-            plugin.removePath(authzObj, path);
-          }
-        }
-      }
-    }
-  }
-
-}
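
The listener above only fans partition events out to whatever
SentryMetastoreListenerPlugin instances are registered. As a hypothetical
illustration (shown as a standalone class rather than extending
org.apache.sentry.provider.db.SentryMetastoreListenerPlugin, whose full
abstract shape is not reproduced here), a plugin that merely logs the three
callbacks exercised above might look like:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Hypothetical sketch: mirrors the addPath/removePath/renameAuthzObject
    // calls made by the listener; the real HDFS plugin maintains the
    // authz-object-to-path mapping instead of logging.
    public class LoggingPathPlugin {
      private static final Logger LOG =
          LoggerFactory.getLogger(LoggingPathPlugin.class);

      public void addPath(String authzObj, String path) {
        LOG.info("addPath: {} -> {}", authzObj, path);
      }

      public void removePath(String authzObj, String path) {
        LOG.info("removePath: {} -> {}", authzObj, path);
      }

      public void renameAuthzObject(String oldName, String oldPath,
          String newName, String newPath) {
        LOG.info("rename: {} ({}) -> {} ({})",
            oldName, oldPath, newName, newPath);
      }
    }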

http://git-wip-us.apache.org/repos/asf/sentry/blob/e358fde7/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SentryAuthorizerUtil.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SentryAuthorizerUtil.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SentryAuthorizerUtil.java
deleted file mode 100644
index 32479d8..0000000
--- a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SentryAuthorizerUtil.java
+++ /dev/null
@@ -1,362 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
- * agreements. See the NOTICE file distributed with this work for additional information regarding
- * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License. You may obtain a
- * copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.sentry.binding.hive.v2.util;
-
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.common.JavaUtils;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.api.PrincipalType;
-import org.apache.hadoop.hive.ql.hooks.Hook;
-import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.hadoop.hive.ql.security.authorization.PrivilegeType;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.sentry.binding.hive.SentryOnFailureHook;
-import org.apache.sentry.binding.hive.SentryOnFailureHookContext;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
-import org.apache.sentry.core.common.utils.PathUtils;
-import org.apache.sentry.core.model.db.AccessConstants;
-import org.apache.sentry.core.model.db.AccessURI;
-import org.apache.sentry.core.model.db.Column;
-import org.apache.sentry.core.model.db.DBModelAuthorizable;
-import org.apache.sentry.core.model.db.Database;
-import org.apache.sentry.core.model.db.Server;
-import org.apache.sentry.core.model.db.Table;
-import org.apache.sentry.api.service.thrift.TSentryGrantOption;
-import org.apache.sentry.api.service.thrift.TSentryPrivilege;
-import org.apache.sentry.api.service.thrift.TSentryRole;
-import org.apache.sentry.service.thrift.ServiceConstants.PrivilegeScope;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Splitter;
-
-public class SentryAuthorizerUtil {
-  public static final Logger LOG = LoggerFactory.getLogger(SentryAuthorizerUtil.class);
-  public static final String UNKNOWN_GRANTOR = "--";
-
-  /**
-   * Convert a path string to an AccessURI, resolving it against the warehouse directory
-   *
-   * @param uri
-   * @param isLocal
-   * @throws URISyntaxException
-   */
-  public static AccessURI parseURI(String uri, boolean isLocal) throws URISyntaxException {
-    HiveConf conf = SessionState.get().getConf();
-    String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE);
-    return new AccessURI(PathUtils.parseURI(warehouseDir, uri, isLocal));
-  }
-
-  /**
-   * Convert a HivePrivilegeObject to a list of DBModelAuthorizable hierarchies.
-   * Note that Hive 0.13 does not support column-level privileges.
-   *
-   * @param server
-   * @param privilege
-   */
-  public static List<List<DBModelAuthorizable>> getAuthzHierarchy(Server server,
-      HivePrivilegeObject privilege) {
-    List<DBModelAuthorizable> baseHierarchy = new ArrayList<DBModelAuthorizable>();
-    List<List<DBModelAuthorizable>> objectHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-    boolean isLocal = false;
-    if (privilege.getType() != null) {
-      switch (privilege.getType()) {
-        case GLOBAL:
-          baseHierarchy.add(new Server(privilege.getObjectName()));
-          objectHierarchy.add(baseHierarchy);
-          break;
-        case DATABASE:
-          baseHierarchy.add(server);
-          baseHierarchy.add(new Database(privilege.getDbname()));
-          objectHierarchy.add(baseHierarchy);
-          break;
-        case TABLE_OR_VIEW:
-          baseHierarchy.add(server);
-          baseHierarchy.add(new Database(privilege.getDbname()));
-          baseHierarchy.add(new Table(privilege.getObjectName()));
-          if (privilege.getColumns() != null) {
-            for (String columnName : privilege.getColumns()) {
-              List<DBModelAuthorizable> columnHierarchy =
-                  new ArrayList<DBModelAuthorizable>(baseHierarchy);
-              columnHierarchy.add(new Column(columnName));
-              objectHierarchy.add(columnHierarchy);
-            }
-          } else {
-            objectHierarchy.add(baseHierarchy);
-          }
-          break;
-        case LOCAL_URI:
-          isLocal = true;
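-          // intentional fall-through: a LOCAL_URI is handled like a DFS_URI with isLocal set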
-        case DFS_URI:
-          if (privilege.getObjectName() == null) {
-            break;
-          }
-          try {
-            baseHierarchy.add(server);
-            baseHierarchy.add(parseURI(privilege.getObjectName(), isLocal));
-            objectHierarchy.add(baseHierarchy);
-          } catch (Exception e) {
-            throw new AuthorizationException("Failed to get File URI", e);
-          }
-          break;
-        case FUNCTION:
-        case PARTITION:
-        case COLUMN:
-        case COMMAND_PARAMS:
-          // these object types are not supported
-          break;
-        default:
-          break;
-      }
-    }
-    return objectHierarchy;
-  }
-
-  /**
-   * Convert a list of HivePrivilegeObjects to List<List<DBModelAuthorizable>>
-   *
-   * @param server
-   * @param privileges
-   */
-  public static List<List<DBModelAuthorizable>> convert2SentryPrivilegeList(Server server,
-      List<HivePrivilegeObject> privileges) {
-    List<List<DBModelAuthorizable>> hierarchyList = new ArrayList<List<DBModelAuthorizable>>();
-    if (privileges != null && !privileges.isEmpty()) {
-      for (HivePrivilegeObject p : privileges) {
-        hierarchyList.addAll(getAuthzHierarchy(server, p));
-      }
-    }
-    return hierarchyList;
-  }
-
-  /**
-   * Convert an operation type name to a HiveOperation, or null if the name is unknown
-   *
-   * @param typeName
-   */
-  public static HiveOperation convert2HiveOperation(String typeName) {
-    try {
-      return HiveOperation.valueOf(typeName);
-    } catch (Exception e) {
-      return null;
-    }
-  }
-
-  /**
-   * Convert HivePrivilege to Sentry Action
-   *
-   * @param hivePrivilege
-   */
-  public static String convert2SentryAction(HivePrivilege hivePrivilege) {
-    if (PrivilegeType.ALL.name().equals(hivePrivilege.getName())) {
-      return AccessConstants.ALL;
-    } else {
-      return hivePrivilege.getName();
-    }
-  }
-
-  /**
-   * Convert a Sentry action to a HivePrivilege
-   *
-   * @param action
-   */
-  public static HivePrivilege convert2HivePrivilege(String action) {
-    return new HivePrivilege(action, null);
-  }
-
-  /**
-   * Convert TSentryRole Set to String List
-   *
-   * @param roleSet
-   */
-  public static List<String> convert2RoleList(Set<TSentryRole> roleSet) {
-    List<String> roles = new ArrayList<String>();
-    if (roleSet != null && !roleSet.isEmpty()) {
-      for (TSentryRole tRole : roleSet) {
-        roles.add(tRole.getRoleName());
-      }
-    }
-    return roles;
-  }
-
-  /**
-   * Convert TSentryPrivilege to HivePrivilegeInfo
-   *
-   * @param tPrivilege
-   * @param principal
-   */
-  public static HivePrivilegeInfo convert2HivePrivilegeInfo(TSentryPrivilege tPrivilege,
-      HivePrincipal principal) {
-    HivePrivilege hivePrivilege = convert2HivePrivilege(tPrivilege.getAction());
-    HivePrivilegeObject hivePrivilegeObject = convert2HivePrivilegeObject(tPrivilege);
-    // Sentry does not currently expose the grantor of a privilege
-    HivePrincipal grantor = new HivePrincipal(UNKNOWN_GRANTOR, HivePrincipalType.ROLE);
-    boolean grantOption = TSentryGrantOption.TRUE.equals(tPrivilege.getGrantOption());
-    return new HivePrivilegeInfo(principal, hivePrivilege, hivePrivilegeObject, grantor,
-        grantOption, (int) tPrivilege.getCreateTime());
-  }
-
-  /**
-   * Convert TSentryPrivilege to HivePrivilegeObject
-   *
-   * @param tSentryPrivilege
-   */
-  public static HivePrivilegeObject convert2HivePrivilegeObject(TSentryPrivilege tSentryPrivilege) {
-    HivePrivilegeObject privilege = null;
-    switch (PrivilegeScope.valueOf(tSentryPrivilege.getPrivilegeScope())) {
-      case SERVER:
-        privilege = new HivePrivilegeObject(HivePrivilegeObjectType.GLOBAL, "*", null);
-        break;
-      case DATABASE:
-        privilege =
-            new HivePrivilegeObject(HivePrivilegeObjectType.DATABASE, tSentryPrivilege.getDbName(),
-                null);
-        break;
-      case TABLE:
-        privilege =
-            new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW,
-                tSentryPrivilege.getDbName(), tSentryPrivilege.getTableName());
-        break;
-      case COLUMN:
-        privilege =
-            new HivePrivilegeObject(HivePrivilegeObjectType.COLUMN, tSentryPrivilege.getDbName(),
-                tSentryPrivilege.getTableName(), null, tSentryPrivilege.getColumnName());
-        break;
-      case URI:
-        String uriString = tSentryPrivilege.getURI();
-        try {
-          uriString = uriString.replace("'", "").replace("\"", "");
-          HivePrivilegeObjectType type =
-              isLocalUri(uriString) ? HivePrivilegeObjectType.LOCAL_URI
-                  : HivePrivilegeObjectType.DFS_URI;
-          privilege = new HivePrivilegeObject(type, uriString, null);
-        } catch (URISyntaxException e1) {
-          throw new RuntimeException(uriString + " is not a valid URI", e1);
-        }
-        break; // without this break, the URI case falls through to the default warning
-      default:
-        LOG.warn("Unknown PrivilegeScope: "
-            + PrivilegeScope.valueOf(tSentryPrivilege.getPrivilegeScope()));
-        break;
-    }
-    return privilege;
-  }
-
-  public static boolean isLocalUri(String uriString) throws URISyntaxException {
-    URI uri = new URI(uriString);
-    // A "file" scheme means a local URI; a null scheme is treated as non-local.
-    return "file".equalsIgnoreCase(uri.getScheme());
-  }
-
-  /**
-   * Convert TSentryRole to HiveRoleGrant
-   *
-   * @param role
-   */
-  public static HiveRoleGrant convert2HiveRoleGrant(TSentryRole role) {
-    HiveRoleGrant hiveRoleGrant = new HiveRoleGrant();
-    hiveRoleGrant.setRoleName(role.getRoleName());
-    hiveRoleGrant.setPrincipalName(role.getRoleName());
-    hiveRoleGrant.setPrincipalType(PrincipalType.ROLE.name());
-    hiveRoleGrant.setGrantOption(false);
-    hiveRoleGrant.setGrantor(role.getGrantorPrincipal());
-    hiveRoleGrant.setGrantorType(PrincipalType.USER.name());
-    return hiveRoleGrant;
-  }
-
-  /**
-   * Execute the configured on-failure hooks (used by the e2e tests)
-   *
-   * @param hookCtx
-   * @param conf
-   */
-  public static void executeOnFailureHooks(SentryOnFailureHookContext hookCtx, Configuration conf) {
-    String csHooks =
-        conf.get(HiveAuthzConf.AuthzConfVars.AUTHZ_ONFAILURE_HOOKS.getVar(), "").trim();
-
-    try {
-      for (Hook aofh : SentryAuthorizerUtil.getHooks(csHooks)) {
-        ((SentryOnFailureHook) aofh).run(hookCtx);
-      }
-    } catch (Exception ex) {
-      LOG.error("Error executing hook:", ex);
-    }
-  }
-
-  /**
-   * Returns the hooks specified in a comma-separated configuration value.
-   *
-   * See getHooks(String csHooks, Class<T> clazz)
-   *
-   * @param csHooks comma-separated list of hook class names
-   * @return the instantiated hooks, in the order listed
-   * @throws Exception
-   */
-  public static List<Hook> getHooks(String csHooks) throws Exception {
-    return getHooks(csHooks, Hook.class);
-  }
-
-  /**
-   * Returns the hooks specified in a configuration value. The hooks are returned in a list in
-   * the order they were specified.
-   *
-   * @param csHooks A comma separated list of the hook class names.
-   * @param clazz The super type of the hooks.
-   * @return A list of the hooks cast as the type specified in clazz, in the order they are
-   *         listed in csHooks
-   * @throws Exception
-   */
-  public static <T extends Hook> List<T> getHooks(String csHooks, Class<T> clazz) throws Exception {
-
-    List<T> hooks = new ArrayList<T>();
-    if (csHooks.isEmpty()) {
-      return hooks;
-    }
-    for (String hookClass : Splitter.on(",").omitEmptyStrings().trimResults().split(csHooks)) {
-      try {
-        @SuppressWarnings("unchecked")
-        T hook = (T) Class.forName(hookClass, true, JavaUtils.getClassLoader()).newInstance();
-        hooks.add(hook);
-      } catch (ClassNotFoundException e) {
-        LOG.error("Hook class " + hookClass + " not found: " + e.getMessage());
-        throw e;
-      }
-    }
-
-    return hooks;
-  }
-}
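
A quick, JDK-only way to sanity-check the URI handling above (the demo class
name is hypothetical; the scheme test mirrors isLocalUri):

    import java.net.URI;
    import java.net.URISyntaxException;

    public class LocalUriDemo {
      // "file" scheme means a local URI; a null scheme counts as non-local.
      static boolean isLocalUri(String uriString) throws URISyntaxException {
        return "file".equalsIgnoreCase(new URI(uriString).getScheme());
      }

      public static void main(String[] args) throws URISyntaxException {
        System.out.println(isLocalUri("file:///tmp/data"));        // true
        System.out.println(isLocalUri("hdfs://nn:8020/tmp/data")); // false
      }
    }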

http://git-wip-us.apache.org/repos/asf/sentry/blob/e358fde7/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SimpleSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SimpleSemanticAnalyzer.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SimpleSemanticAnalyzer.java
deleted file mode 100644
index 85afe52..0000000
--- a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SimpleSemanticAnalyzer.java
+++ /dev/null
@@ -1,372 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
- * agreements. See the NOTICE file distributed with this work for additional information regarding
- * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License. You may obtain a
- * copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.sentry.binding.hive.v2.util;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.sentry.core.model.db.Table;
-
-/**
- * Currently the Hive compiler doesn't create read/write entities for some operations, e.g.
- * CREATE TABLE and DROP TABLE. This class is a simple regex-based semantic analyzer; it is a
- * workaround used to extract db_name and tb_name for those operations.
- */
-public class SimpleSemanticAnalyzer {
-  private String currentDb;
-  private String currentTb;
-
-  /**
-   * CREATE [TEMPORARY] [EXTERNAL] TABLE [IF NOT EXISTS] [db_name.]table_name ...
-   */
-  private static final String CREATE_TABLE_REGEX = "^(CREATE)\\s+" + "(TEMPORARY\\s+)?"
-      + "(EXTERNAL\\s+)?" + "TABLE\\s+" + "(IF\\s+NOT\\s+EXISTS\\s+)?" + "([A-Za-z0-9._]+)";
-
-  /**
-   * DROP (DATABASE|SCHEMA) [IF EXISTS] database_name [RESTRICT|CASCADE];
-   */
-  private static final String DROP_DB_REGEX = "^DROP\\s+" + "(DATABASE|SCHEMA)\\s+"
-      + "(IF\\s+EXISTS\\s+)?" + "([A-Za-z0-9_]+)";
-
-  /**
-   * DROP TABLE [IF EXISTS] table_name;
-   */
-  private static final String DROP_TABLE_REGEX = "^DROP\\s+" + "TABLE\\s+" + "(IF\\s+EXISTS\\s+)?"
-      + "([A-Za-z0-9._]+)";
-
-  /**
-   * DROP VIEW [IF EXISTS] view_name;
-   */
-  private static final String DROP_VIEW_REGEX = "^DROP\\s+" + "VIEW\\s+" + "(IF\\s+EXISTS\\s+)?"
-      + "([A-Za-z0-9_].+)";
-
-  /**
-   * DESCRIBE DATABASE|SCHEMA [EXTENDED] db_name;
-   */
-  private static final String DESCRIBE_DB_REGEX = "^DESCRIBE\\s+" + "(DATABASE|SCHEMA)\\s+"
-      + "(EXTENDED\\s+)?" + "([A-Za-z0-9_]+)";
-
-  /**
-   * DESCRIBE [EXTENDED|FORMATTED] [db_name.]table_name[.col_name ( [.field_name] | [.'$elem$'] |
-   * [.'$key$'] | [.'$value$'] )* ];
-   */
-  private static final String DESCRIBE_TABLE_REGEX = "^DESCRIBE\\s+"
-      + "((EXTENDED|FORMATTED)\\s+)?" + "([A-Za-z0-9._]+)";
-
-  /**
-   * SHOW [FORMATTED] (INDEX|INDEXES) ON table_with_index [(FROM|IN) db_name];
-   */
-  private static final String SHOW_INDEX_REGEX = "^SHOW\\s+" + "(FORMATTED\\s+)?"
-      + "(INDEX|INDEXES)\\s+" + "ON\\s+" + "([A-Za-z0-9._]+)\\s*"
-      + "((FROM|IN)\\s+([A-Za-z0-9_]+))?";
-
-  /**
-   * SHOW TBLPROPERTIES tblname;
-   */
-  private static final String SHOW_TBLPROPERTIES_REGEX = "^SHOW\\s+" + "TBLPROPERTIES\\s+"
-      + "([A-Za-z0-9._]+)";
-
-  /**
-   * ALTER TABLE table_name ...
-   */
-  private static final String ALTER_TABLE_REGEX = "^ALTER\\s+" + "TABLE\\s+" + "([A-Za-z0-9._]+)";
-
-  /**
-   * ALTER VIEW view_name ...
-   */
-  private static final String ALTER_VIEW_REGEX = "^ALTER\\s+" + "VIEW\\s+" + "([A-Za-z0-9._]+)";
-
-  /**
-   * MSCK REPAIR TABLE table_name;
-   */
-  private static final String MSCK_REGEX = "^MSCK\\s+" + "REPAIR\\s+" + "TABLE\\s+"
-      + "([A-Za-z0-9._]+)";
-
-  /**
-   * ALTER INDEX index_name ON table_name [PARTITION partition_spec] REBUILD;
-   */
-  private static final String ALTER_INDEX_REGEX = "^ALTER\\s+" + "INDEX\\s+"
-      + "([A-Za-z0-9_]+)\\s+" + "ON\\s+" + "([A-Za-z0-9._]+)";
-
-  /**
-   * CREATE FUNCTION [db_name.]function_name AS class_name [USING JAR|FILE|ARCHIVE 'file_uri' [,
-   * JAR|FILE|ARCHIVE 'file_uri'] ];
-   */
-  private static final String CREATE_FUNCTION_REGEX = "^CREATE\\s+" + "(TEMPORARY\\s+)?"
-      + "FUNCTION\\s+" + "([A-Za-z0-9._]+)\\s+" + "AS\\s+" + "([A-Za-z0-9._']+)";
-
-  /**
-   * SHOW COLUMNS FROM table_name
-   */
-  private static final String SHOWCOLUMNS = "^SHOW\\s+" + "COLUMNS\\s+" + "(FROM|IN)\\s+"
-      + "([A-Za-z0-9._]+)";
-
-  private static final String SHOW_TABLESTATUS = "^SHOW\\s+" + "TABLE\\s+" + "EXTENDED\\s+" + "IN\\s+"
-      + "([A-Za-z0-9._]+)";
-
-  private static final String LOAD = "^LOAD\\s+" + "DATA\\s+" + "(LOCAL\\s+)?" + "INPATH\\s+"
-      + "([A-Za-z0-9._':///-]+)" + "\\s+" + "(OVERWRITE\\s+)?" + "INTO\\s+" + "TABLE\\s+" + "([A-Za-z0-9._]+)";
-
-  /**
-   * LOCK DATABASE dbname;
-   */
-  private static final String LOCKDB = "^LOCK\\s+" + "DATABASE\\s+" + "([A-Za-z0-9._]+)";
-
-  /**
-   * UNLOCK DATABASE dbname;
-   */
-  private static final String UNLOCKDB = "^UNLOCK\\s+" + "DATABASE\\s+" + "([A-Za-z0-9._]+)";
-
-  /**
-   * LOCK TABLE tblname;
-   */
-  private static final String LOCKTABLE = "^LOCK\\s+" + "TABLE\\s+" + "([A-Za-z0-9._]+)";
-
-  /**
-   * UNLOCK TABLE tblname;
-   */
-  private static final String UNLOCKTABLE = "^UNLOCK\\s+" + "TABLE\\s+" + "([A-Za-z0-9._]+)";
-
-  /**
-   * TRUNCATE TABLE tblname;
-   */
-  private static final String TRUNCATETABLE = "^TRUNCATE\\s+" + "TABLE\\s+" + "([A-Za-z0-9._]+)";
-
-  private static final Map<HiveOperation, String> OP_REGEX_MAP = new HashMap<HiveOperation, String>();
-  static {
-    // database metadata
-    OP_REGEX_MAP.put(HiveOperation.DROPDATABASE, DROP_DB_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.DESCDATABASE, DESCRIBE_DB_REGEX);
-
-    // table metadata
-    OP_REGEX_MAP.put(HiveOperation.CREATETABLE, CREATE_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.DROPTABLE, DROP_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.DROPVIEW, DROP_VIEW_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.DESCTABLE, DESCRIBE_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.SHOW_TBLPROPERTIES, SHOW_TBLPROPERTIES_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_PROPERTIES, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_SERDEPROPERTIES, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_CLUSTER_SORT, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_FILEFORMAT, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_TOUCH, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_RENAMECOL, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_ADDCOLS, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_REPLACECOLS, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_RENAMEPART, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_ARCHIVE, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_UNARCHIVE, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_SERIALIZER, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_MERGEFILES, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_SKEWED, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_DROPPARTS, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_ADDPARTS, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_RENAME, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_LOCATION, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_FILEFORMAT, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_SERDEPROPERTIES, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_SERIALIZER, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_MERGEFILES, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_LOCATION, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERTBLPART_SKEWED_LOCATION, ALTER_TABLE_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERVIEW_PROPERTIES, ALTER_VIEW_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.MSCK, MSCK_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERINDEX_REBUILD, ALTER_INDEX_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.ALTERINDEX_PROPS, ALTER_INDEX_REGEX);
-    OP_REGEX_MAP.put(HiveOperation.LOCKDB, LOCKDB);
-    OP_REGEX_MAP.put(HiveOperation.UNLOCKDB, UNLOCKDB);
-    OP_REGEX_MAP.put(HiveOperation.LOCKTABLE, LOCKTABLE);
-    OP_REGEX_MAP.put(HiveOperation.UNLOCKTABLE, UNLOCKTABLE);
-    OP_REGEX_MAP.put(HiveOperation.SHOWCOLUMNS, SHOWCOLUMNS);
-    OP_REGEX_MAP.put(HiveOperation.SHOW_TABLESTATUS, SHOW_TABLESTATUS);
-    OP_REGEX_MAP.put(HiveOperation.TRUNCATETABLE, TRUNCATETABLE);
-  }
-
-  public SimpleSemanticAnalyzer(HiveOperation hiveOp, String cmd) throws HiveAuthzPluginException {
-    currentDb = SessionState.get().getCurrentDatabase();
-    parse(hiveOp, cmd);
-  }
-
-  private void parse(HiveOperation hiveOp, String cmd) throws HiveAuthzPluginException {
-    switch (hiveOp) {
-      case DROPDATABASE:
-      case DESCDATABASE:
-      case LOCKDB:
-      case UNLOCKDB:
-        parseDbMeta(cmd, OP_REGEX_MAP.get(hiveOp));
-        break;
-      case DESCTABLE:
-      case CREATETABLE:
-      case DROPTABLE:
-      case DROPVIEW:
-      case SHOW_TBLPROPERTIES:
-        // alter table
-      case ALTERTABLE_PROPERTIES:
-      case ALTERTABLE_SERDEPROPERTIES:
-      case ALTERTABLE_CLUSTER_SORT:
-      case ALTERTABLE_FILEFORMAT:
-      case ALTERTABLE_TOUCH:
-      case ALTERTABLE_RENAMECOL:
-      case ALTERTABLE_ADDCOLS:
-      case ALTERTABLE_REPLACECOLS:
-      case ALTERTABLE_RENAMEPART:
-      case ALTERTABLE_ARCHIVE:
-      case ALTERTABLE_UNARCHIVE:
-      case ALTERTABLE_SERIALIZER:
-      case ALTERTABLE_MERGEFILES:
-      case ALTERTABLE_SKEWED:
-      case ALTERTABLE_DROPPARTS:
-      case ALTERTABLE_ADDPARTS:
-      case ALTERTABLE_RENAME:
-      case ALTERTABLE_LOCATION:
-        // alter view
-      case ALTERVIEW_PROPERTIES:
-        // alter partition
-      case ALTERPARTITION_FILEFORMAT:
-      case ALTERPARTITION_SERDEPROPERTIES:
-      case ALTERPARTITION_SERIALIZER:
-      case ALTERPARTITION_MERGEFILES:
-      case ALTERPARTITION_LOCATION:
-      case ALTERTBLPART_SKEWED_LOCATION:
-        // MSCK
-      case MSCK:
-        // alter index
-      case ALTERINDEX_REBUILD:
-      case ALTERINDEX_PROPS:
-      case LOCKTABLE:
-      case UNLOCKTABLE:
-      case SHOWCOLUMNS:
-      case TRUNCATETABLE:
-        parseTableMeta(cmd, OP_REGEX_MAP.get(hiveOp));
-        break;
-      case SHOWINDEXES:
-        parseShowIndex(cmd, SHOW_INDEX_REGEX);
-        break;
-      case CREATEFUNCTION:
-        parseFunction(cmd, CREATE_FUNCTION_REGEX);
-        break;
-      case SHOW_TABLESTATUS:
-        parseTableExtend(cmd, SHOW_TABLESTATUS);
-        break;
-      case LOAD:
-        parseLoadTable(cmd, LOAD);
-        break;
-      default:
-        break;
-    }
-  }
-
-  private void parseLoadTable(String cmd, String load) throws HiveAuthzPluginException {
-    Pattern pattern = Pattern.compile(load, Pattern.CASE_INSENSITIVE);
-    Matcher matcher = pattern.matcher(cmd);
-    if (matcher.find()) {
-      String tbName = matcher.group(matcher.groupCount());
-      extractDbAndTb(tbName.trim());
-    } else {
-      throw new HiveAuthzPluginException("this command " + cmd + " does not match the table meta grammar");
-    }
-  }
-
-  private void parseTableExtend(String cmd, String showTablestatus) throws HiveAuthzPluginException {
-    Pattern pattern = Pattern.compile(showTablestatus, Pattern.CASE_INSENSITIVE);
-    Matcher matcher = pattern.matcher(cmd);
-    if (matcher.find()) {
-      String dbName = matcher.group(matcher.groupCount());
-      currentDb = dbName;
-      currentTb = Table.SOME.getName();
-    } else {
-      throw new HiveAuthzPluginException("this command " + cmd + " does not match the table meta grammar");
-    }
-  }
-
-  private void extractDbAndTb(String tableName) {
-    if (tableName.contains(".")) {
-      String[] tb = tableName.split("\\.");
-      currentDb = tb[0];
-      currentTb = tb[1];
-    } else {
-      currentDb = SessionState.get().getCurrentDatabase();
-      currentTb = tableName;
-    }
-  }
-
-  private void parseDbMeta(String cmd, String regex) throws HiveAuthzPluginException {
-    Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE);
-    Matcher matcher = pattern.matcher(cmd);
-    if (matcher.find()) {
-      currentDb = matcher.group(matcher.groupCount());
-    } else {
-      throw new HiveAuthzPluginException("this command " + cmd
-          + " does not match the database meta grammar");
-    }
-  }
-
-  private void parseTableMeta(String cmd, String regex) throws HiveAuthzPluginException {
-    Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE);
-    Matcher matcher = pattern.matcher(cmd);
-    if (matcher.find()) {
-      String tbName = matcher.group(matcher.groupCount());
-      extractDbAndTb(tbName.trim());
-    } else {
-      throw new HiveAuthzPluginException("this command " + cmd + " does not match the table meta grammar");
-    }
-  }
-
-  private void parseShowIndex(String cmd, String regex) throws HiveAuthzPluginException {
-    Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE);
-    Matcher matcher = pattern.matcher(cmd);
-    if (matcher.find()) {
-      String dbName = matcher.group(matcher.groupCount());
-      String tbName = matcher.group(3);
-      if (dbName != null) {
-        currentDb = dbName;
-        currentTb = tbName;
-      } else {
-        extractDbAndTb(tbName);
-      }
-    } else {
-      throw new HiveAuthzPluginException("this command " + cmd + " does not match the show index grammar");
-    }
-  }
-
-  private void parseFunction(String cmd, String regex) throws HiveAuthzPluginException {
-    Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE);
-    Matcher matcher = pattern.matcher(cmd);
-    if (matcher.find()) {
-      String udfClass = matcher.group(matcher.groupCount());
-      if (udfClass.contains("'")) {
-        currentTb = udfClass.split("'")[1];
-      } else {
-        currentTb = udfClass;
-      }
-    } else {
-      throw new HiveAuthzPluginException("this command " + cmd
-          + " does not match the create function grammar");
-    }
-  }
-
-  public String getCurrentDb() {
-    return currentDb;
-  }
-
-  public String getCurrentTb() {
-    return currentTb;
-  }
-
-}
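
The regex approach is easy to exercise outside Hive. A self-contained sketch
using the same DROP TABLE pattern as above (the demo class name is
illustrative):

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class DropTableRegexDemo {
      // Same pattern as SimpleSemanticAnalyzer's DROP_TABLE_REGEX.
      private static final Pattern DROP_TABLE = Pattern.compile(
          "^DROP\\s+TABLE\\s+(IF\\s+EXISTS\\s+)?([A-Za-z0-9._]+)",
          Pattern.CASE_INSENSITIVE);

      public static void main(String[] args) {
        Matcher m = DROP_TABLE.matcher("DROP TABLE IF EXISTS sales.q1_orders");
        if (m.find()) {
          // The last group holds the (optionally db-qualified) table name.
          String qualified = m.group(m.groupCount());
          String[] parts = qualified.split("\\.");
          String db = parts.length == 2 ? parts[0] : "(current database)";
          String table = parts.length == 2 ? parts[1] : parts[0];
          System.out.println(db + " / " + table); // sales / q1_orders
        }
      }
    }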