Posted to commits@sentry.apache.org by sd...@apache.org on 2016/03/23 03:22:32 UTC

[1/5] sentry git commit: SENTRY-1138: Extract common classes for binding-hive-v1 and binding-hive-v2 (Dapeng Sun, reviewed by Colin Ma)

Repository: sentry
Updated Branches:
  refs/heads/master 4643f988a -> 7a30c819c


http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java
deleted file mode 100644
index 616d46c..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java
+++ /dev/null
@@ -1,622 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.binding.hive.authz;
-
-import java.security.CodeSource;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.GnuParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionGroup;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.Parser;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.Driver;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.sentry.Command;
-import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
-import org.apache.sentry.binding.hive.SentryPolicyFileFormatFactory;
-import org.apache.sentry.binding.hive.SentryPolicyFileFormatter;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
-import org.apache.sentry.core.common.SentryConfigurationException;
-import org.apache.sentry.core.common.Subject;
-import org.apache.sentry.core.model.db.Server;
-import org.apache.sentry.provider.common.AuthorizationProvider;
-import org.apache.sentry.provider.db.service.thrift.SentryPolicyServiceClient;
-import org.apache.sentry.service.thrift.SentryServiceClientFactory;
-
-/**
- * Command-line tool for the Sentry Hive binding: validates policy files,
- * imports and exports the Sentry policy mapping data, lists a user's
- * privileges, and verifies query privileges either locally or against a
- * remote HiveServer2.
- */
-public class SentryConfigTool {
-  private String sentrySiteFile = null;
-  private String policyFile = null;
-  private String query = null;
-  private String jdbcURL = null;
-  private String user = null;
-  private String passWord = null;
-  private String importPolicyFilePath = null;
-  private String exportPolicyFilePath = null;
-  private boolean listPrivs = false;
-  private boolean validate = false;
-  private boolean importOverwriteRole = false;
-  private HiveConf hiveConf = null;
-  private HiveAuthzConf authzConf = null;
-  private AuthorizationProvider sentryProvider = null;
-
-  public SentryConfigTool() {
-
-  }
-
-  public AuthorizationProvider getSentryProvider() {
-    return sentryProvider;
-  }
-
-  public void setSentryProvider(AuthorizationProvider sentryProvider) {
-    this.sentryProvider = sentryProvider;
-  }
-
-  public HiveConf getHiveConf() {
-    return hiveConf;
-  }
-
-  public void setHiveConf(HiveConf hiveConf) {
-    this.hiveConf = hiveConf;
-  }
-
-  public HiveAuthzConf getAuthzConf() {
-    return authzConf;
-  }
-
-  public void setAuthzConf(HiveAuthzConf authzConf) {
-    this.authzConf = authzConf;
-  }
-
-  public boolean isValidate() {
-    return validate;
-  }
-
-  public void setValidate(boolean validate) {
-    this.validate = validate;
-  }
-
-  public String getImportPolicyFilePath() {
-    return importPolicyFilePath;
-  }
-
-  public void setImportPolicyFilePath(String importPolicyFilePath) {
-    this.importPolicyFilePath = importPolicyFilePath;
-  }
-
-  public String getExportPolicyFilePath() {
-    return exportPolicyFilePath;
-  }
-
-  public void setExportPolicyFilePath(String exportPolicyFilePath) {
-    this.exportPolicyFilePath = exportPolicyFilePath;
-  }
-
-  public String getSentrySiteFile() {
-    return sentrySiteFile;
-  }
-
-  public void setSentrySiteFile(String sentrySiteFile) {
-    this.sentrySiteFile = sentrySiteFile;
-  }
-
-  public String getPolicyFile() {
-    return policyFile;
-  }
-
-  public void setPolicyFile(String policyFile) {
-    this.policyFile = policyFile;
-  }
-
-  public String getQuery() {
-    return query;
-  }
-
-  public void setQuery(String query) {
-    this.query = query;
-  }
-
-  public String getJdbcURL() {
-    return jdbcURL;
-  }
-
-  public void setJdbcURL(String jdbcURL) {
-    this.jdbcURL = jdbcURL;
-  }
-
-  public String getUser() {
-    return user;
-  }
-
-  public void setUser(String user) {
-    this.user = user;
-  }
-
-  public String getPassWord() {
-    return passWord;
-  }
-
-  public void setPassWord(String passWord) {
-    this.passWord = passWord;
-  }
-
-  public boolean isListPrivs() {
-    return listPrivs;
-  }
-
-  public void setListPrivs(boolean listPrivs) {
-    this.listPrivs = listPrivs;
-  }
-
-  public boolean isImportOverwriteRole() {
-    return importOverwriteRole;
-  }
-
-  public void setImportOverwriteRole(boolean importOverwriteRole) {
-    this.importOverwriteRole = importOverwriteRole;
-  }
-
-  /**
-   * Set the required system properties to be read by HiveConf and AuthzConf.
-   * @throws Exception
-   */
-  public void setupConfig() throws Exception {
-    System.out.println("Configuration: ");
-    CodeSource src = SentryConfigTool.class.getProtectionDomain()
-        .getCodeSource();
-    if (src != null) {
-      System.out.println("Sentry package jar: " + src.getLocation());
-    }
-
-    if (getPolicyFile() != null) {
-      System.setProperty(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar(),
-          getPolicyFile());
-    }
-    System.setProperty(AuthzConfVars.SENTRY_TESTING_MODE.getVar(), "true");
-    setHiveConf(new HiveConf(SessionState.class));
-    getHiveConf().setVar(ConfVars.SEMANTIC_ANALYZER_HOOK,
-        HiveAuthzBindingHook.class.getName());
-    try {
-      System.out.println("Hive config: " + HiveConf.getHiveSiteLocation());
-    } catch (NullPointerException e) {
-      // Hack: HiveConf doesn't provide a reliable way to check whether it
-      // found a valid hive-site.xml
-      throw new SentryConfigurationException("Didn't find a hive-site.xml");
-
-    }
-
-    if (getSentrySiteFile() != null) {
-      getHiveConf()
-          .set(HiveAuthzConf.HIVE_SENTRY_CONF_URL, getSentrySiteFile());
-    }
-
-    setAuthzConf(HiveAuthzConf.getAuthzConf(getHiveConf()));
-    System.out.println("Sentry config: "
-        + getAuthzConf().getHiveAuthzSiteFile());
-    System.out.println("Sentry Policy: "
-        + getAuthzConf().get(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar()));
-    System.out.println("Sentry server: "
-        + getAuthzConf().get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar()));
-
-    setSentryProvider(getAuthorizationProvider());
-  }
-
-  // load auth provider
-  private AuthorizationProvider getAuthorizationProvider()
-      throws IllegalStateException, SentryConfigurationException {
-    String serverName = new Server(getAuthzConf().get(
-        AuthzConfVars.AUTHZ_SERVER_NAME.getVar())).getName();
-    // get the configured sentry provider
-    AuthorizationProvider sentryProvider = null;
-    try {
-      sentryProvider = HiveAuthzBinding.getAuthProvider(getHiveConf(),
-          authzConf, serverName);
-    } catch (SentryConfigurationException eC) {
-      printConfigErrors(eC);
-    } catch (Exception e) {
-      throw new IllegalStateException("Couldn't load sentry provider ", e);
-    }
-    return sentryProvider;
-  }
-
-  // validate policy files
-  public void validatePolicy() throws Exception {
-    try {
-      getSentryProvider().validateResource(true);
-    } catch (SentryConfigurationException e) {
-      printConfigErrors(e);
-    }
-    System.out.println("No errors found in the policy file");
-  }
-
-  // import the sentry mapping data to database
-  public void importPolicy() throws Exception {
-    String requestorUserName = System.getProperty("user.name", "");
-    // get the FileFormatter according to the configuration
-    SentryPolicyFileFormatter sentryPolicyFileFormatter = SentryPolicyFileFormatFactory
-        .createFileFormatter(authzConf);
-    // parse the input file, get the mapping data in map structure
-    Map<String, Map<String, Set<String>>> policyFileMappingData = sentryPolicyFileFormatter.parse(
-        importPolicyFilePath, authzConf);
-    // TODO: add a validator to check the data's values, format, and hierarchy
-    SentryPolicyServiceClient client = SentryServiceClientFactory.create(getAuthzConf());
-    // import the mapping data to database
-    client.importPolicy(policyFileMappingData, requestorUserName, importOverwriteRole);
-  }
-
-  // export the sentry mapping data to file
-  public void exportPolicy() throws Exception {
-    String requestorUserName = System.getProperty("user.name", "");
-    SentryPolicyServiceClient client = SentryServiceClientFactory.create(getAuthzConf());
-    // export the sentry mapping data from database to map structure
-    Map<String, Map<String, Set<String>>> policyFileMappingData = client
-        .exportPolicy(requestorUserName);
-    // get the FileFormatter according to the configuration
-    SentryPolicyFileFormatter sentryPolicyFileFormatter = SentryPolicyFileFormatFactory
-        .createFileFormatter(authzConf);
-    // write the sentry mapping data to exportPolicyFilePath with the data in map structure
-    sentryPolicyFileFormatter.write(exportPolicyFilePath, policyFileMappingData);
-  }
-
-  // list permissions for given user
-  public void listPrivs() throws Exception {
-    getSentryProvider().validateResource(true);
-    System.out.println("Available privileges for user " + getUser() + ":");
-    Set<String> permList = getSentryProvider().listPrivilegesForSubject(
-        new Subject(getUser()));
-    for (String perms : permList) {
-      System.out.println("\t" + perms);
-    }
-    if (permList.isEmpty()) {
-      System.out.println("\t*** No permissions available ***");
-    }
-  }
-
-  // Verify the given query
-  public void verifyLocalQuery(String queryStr) throws Exception {
-    // setup Hive driver
-    SessionState session = new SessionState(getHiveConf());
-    SessionState.start(session);
-    Driver driver = new Driver(session.getConf(), getUser());
-
-    // compile the query
-    CommandProcessorResponse compilerStatus = driver
-        .compileAndRespond(queryStr);
-    if (compilerStatus.getResponseCode() != 0) {
-      String errMsg = compilerStatus.getErrorMessage();
-      if (errMsg.contains(HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE)) {
-        printMissingPerms(getHiveConf().get(
-            HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS));
-      }
-      throw new SemanticException("Compilation error: "
-          + compilerStatus.getErrorMessage());
-    }
-    driver.close();
-    System.out
-        .println("User " + getUser() + " has privileges to run the query");
-  }
-
-  // connect to remote HS2 and run mock query
-  public void verifyRemoteQuery(String queryStr) throws Exception {
-    Class.forName("org.apache.hive.jdbc.HiveDriver");
-    Connection conn = DriverManager.getConnection(getJdbcURL(), getUser(),
-        getPassWord());
-    Statement stmt = conn.createStatement();
-    if (!isSentryEnabledOnHiveServer(stmt)) {
-      throw new IllegalStateException("Sentry is not enabled on HiveServer2");
-    }
-    stmt.execute("set " + HiveAuthzConf.HIVE_SENTRY_MOCK_COMPILATION + "=true");
-    try {
-      stmt.execute(queryStr);
-    } catch (SQLException e) {
-      String errMsg = e.getMessage();
-      if (errMsg.contains(HiveAuthzConf.HIVE_SENTRY_MOCK_ERROR)) {
-        System.out.println("User "
-            + readConfig(stmt, HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME)
-            + " has privileges to run the query");
-        return;
-      } else if (errMsg
-          .contains(HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE)) {
-        printMissingPerms(readConfig(stmt,
-            HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS));
-        throw e;
-      } else {
-        throw e;
-      }
-    } finally {
-      if (!stmt.isClosed()) {
-        stmt.close();
-      }
-      conn.close();
-    }
-
-  }
-
-  // verify the Sentry session hook is set
-  private boolean isSentryEnabledOnHiveServer(Statement stmt)
-      throws SQLException {
-    String bindingString = readConfig(stmt, HiveConf.ConfVars.HIVE_SERVER2_SESSION_HOOK.varname).toUpperCase();
-    return bindingString.contains("org.apache.sentry.binding.hive".toUpperCase())
-        && bindingString.contains("HiveAuthzBindingSessionHook".toUpperCase());
-  }
-
-  // read a config value using 'set' statement
-  private String readConfig(Statement stmt, String configKey)
-      throws SQLException {
-    ResultSet res = stmt.executeQuery("set " + configKey);
-    if (!res.next()) {
-      return null;
-    }
-    // parse key=value result format
-    String result = res.getString(1);
-    res.close();
-    return result.substring(result.indexOf("=") + 1);
-  }
-
-  // print configuration/policy file errors and warnings
-  private void printConfigErrors(SentryConfigurationException configException)
-      throws SentryConfigurationException {
-    System.out.println(" *** Found configuration problems *** ");
-    for (String errMsg : configException.getConfigErrors()) {
-      System.out.println("ERROR: " + errMsg);
-    }
-    for (String warnMsg : configException.getConfigWarnings()) {
-      System.out.println("Warning: " + warnMsg);
-    }
-    throw configException;
-  }
-
-  // extract the authorization errors from config property and print
-  private void printMissingPerms(String errMsg) {
-    if (errMsg == null || errMsg.isEmpty()) {
-      return;
-    }
-    System.out.println("*** Query compilation failed ***");
-    String[] perms = errMsg.replaceFirst(
-        ".*" + HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE, "")
-        .split(";");
-    System.out.println("Required privileges for given query:");
-    for (int count = 0; count < perms.length; count++) {
-      System.out.println(" \t " + perms[count]);
-    }
-  }
-
-  // print usage
-  private void usage(Options sentryOptions) {
-    HelpFormatter formatter = new HelpFormatter();
-    formatter.printHelp("sentry --command config-tool", sentryOptions);
-    System.exit(-1);
-  }
-
-  /**
-   * parse arguments
-   * 
-   * <pre>
-   *   -d,--debug                  Enable debug output
-   *   -e,--query <arg>            Query privilege verification, requires -u
-   *   -h,--help                   Print usage
-   *   -i,--policyIni <arg>        Policy file path
-   *   -j,--jdbcURL <arg>          JDBC URL
-   *   -l,--listPerms,--listPrivs  List privileges for the given user, requires -u
-   *   -p,--password <arg>         Password
-   *   -s,--sentry-site <arg>      sentry-site file path
-   *   -u,--user <arg>             user name
-   *   -v,--validate               Validate policy file
-   *   -I,--import                 Import policy file
-   *   -E,--export                 Export policy file
-   *   -o,--overwrite              Overwrite existing role data during import
-   * </pre>
-   * 
-   * @param args
-   */
-  private void parseArgs(String[] args) {
-    boolean enableDebug = false;
-
-    Options sentryOptions = new Options();
-
-    Option helpOpt = new Option("h", "help", false, "Print usage");
-    helpOpt.setRequired(false);
-
-    Option validateOpt = new Option("v", "validate", false,
-        "Validate policy file");
-    validateOpt.setRequired(false);
-
-    Option queryOpt = new Option("e", "query", true,
-        "Query privilege verification, requires -u");
-    queryOpt.setRequired(false);
-
-    Option listPermsOpt = new Option("l", "listPerms", false,
-        "list permissions for given user, requires -u");
-    listPermsOpt.setRequired(false);
-    Option listPrivsOpt = new Option("listPrivs", false,
-        "list privileges for given user, requires -u");
-    listPrivsOpt.setRequired(false);
-
-    Option importOpt = new Option("I", "import", true,
-        "Import policy file");
-    importOpt.setRequired(false);
-
-    Option exportOpt = new Option("E", "export", true, "Export policy file");
-    exportOpt.setRequired(false);
-    // required args
-    OptionGroup sentryOptGroup = new OptionGroup();
-    sentryOptGroup.addOption(helpOpt);
-    sentryOptGroup.addOption(validateOpt);
-    sentryOptGroup.addOption(queryOpt);
-    sentryOptGroup.addOption(listPermsOpt);
-    sentryOptGroup.addOption(listPrivsOpt);
-    sentryOptGroup.addOption(importOpt);
-    sentryOptGroup.addOption(exportOpt);
-    sentryOptGroup.setRequired(true);
-    sentryOptions.addOptionGroup(sentryOptGroup);
-
-    // optional args
-    Option jdbcArg = new Option("j", "jdbcURL", true, "JDBC URL");
-    jdbcArg.setRequired(false);
-    sentryOptions.addOption(jdbcArg);
-
-    Option sentrySitePath = new Option("s", "sentry-site", true,
-        "sentry-site file path");
-    sentrySitePath.setRequired(false);
-    sentryOptions.addOption(sentrySitePath);
-
-    Option globalPolicyPath = new Option("i", "policyIni", true,
-        "Policy file path");
-    globalPolicyPath.setRequired(false);
-    sentryOptions.addOption(globalPolicyPath);
-
-    Option userOpt = new Option("u", "user", true, "user name");
-    userOpt.setRequired(false);
-    sentryOptions.addOption(userOpt);
-
-    Option passWordOpt = new Option("p", "password", true, "Password");
-    passWordOpt.setRequired(false);
-    sentryOptions.addOption(passWordOpt);
-
-    Option debugOpt = new Option("d", "debug", false, "enable debug output");
-    debugOpt.setRequired(false);
-    sentryOptions.addOption(debugOpt);
-
-    Option overwriteOpt = new Option("o", "overwrite", false, "enable import overwrite");
-    overwriteOpt.setRequired(false);
-    sentryOptions.addOption(overwriteOpt);
-
-    try {
-      Parser parser = new GnuParser();
-      CommandLine cmd = parser.parse(sentryOptions, args);
-
-      for (Option opt : cmd.getOptions()) {
-        if (opt.getOpt().equals("s")) {
-          setSentrySiteFile(opt.getValue());
-        } else if (opt.getOpt().equals("i")) {
-          setPolicyFile(opt.getValue());
-        } else if (opt.getOpt().equals("e")) {
-          setQuery(opt.getValue());
-        } else if (opt.getOpt().equals("j")) {
-          setJdbcURL(opt.getValue());
-        } else if (opt.getOpt().equals("u")) {
-          setUser(opt.getValue());
-        } else if (opt.getOpt().equals("p")) {
-          setPassWord(opt.getValue());
-        } else if (opt.getOpt().equals("l") || opt.getOpt().equals("listPrivs")) {
-          setListPrivs(true);
-        } else if (opt.getOpt().equals("v")) {
-          setValidate(true);
-        } else if (opt.getOpt().equals("I")) {
-          setImportPolicyFilePath(opt.getValue());
-        } else if (opt.getOpt().equals("E")) {
-          setExportPolicyFilePath(opt.getValue());
-        } else if (opt.getOpt().equals("h")) {
-          usage(sentryOptions);
-        } else if (opt.getOpt().equals("d")) {
-          enableDebug = true;
-        } else if (opt.getOpt().equals("o")) {
-          setImportOverwriteRole(true);
-        }
-      }
-
-      if (isListPrivs() && getUser() == null) {
-        throw new ParseException("Can't use -l without -u ");
-      }
-      if (getQuery() != null && getUser() == null) {
-        throw new ParseException("Must use -u with -e ");
-      }
-    } catch (ParseException e1) {
-      usage(sentryOptions);
-    }
-
-    if (!enableDebug) {
-      // turn off log
-      LogManager.getRootLogger().setLevel(Level.OFF);
-    }
-  }
-
-  public static class CommandImpl implements Command {
-    @Override
-    public void run(String[] args) throws Exception {
-      SentryConfigTool sentryTool = new SentryConfigTool();
-
-      try {
-        // parse arguments
-        sentryTool.parseArgs(args);
-
-        // load configuration
-        sentryTool.setupConfig();
-
-        // validate configuration
-        if (sentryTool.isValidate()) {
-          sentryTool.validatePolicy();
-        }
-
-        if (!StringUtils.isEmpty(sentryTool.getImportPolicyFilePath())) {
-          sentryTool.importPolicy();
-        }
-
-        if (!StringUtils.isEmpty(sentryTool.getExportPolicyFilePath())) {
-          sentryTool.exportPolicy();
-        }
-
-        // list permissions for the given user
-        if (sentryTool.isListPrivs()) {
-          sentryTool.listPrivs();
-        }
-
-        // verify given query
-        if (sentryTool.getQuery() != null) {
-          if (sentryTool.getJdbcURL() != null) {
-            sentryTool.verifyRemoteQuery(sentryTool.getQuery());
-          } else {
-            sentryTool.verifyLocalQuery(sentryTool.getQuery());
-          }
-        }
-      } catch (Exception e) {
-        System.out.println("Sentry tool reported Errors: " + e.getMessage());
-        e.printStackTrace(System.out);
-        System.exit(1);
-      }
-    }
-  }
-}

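For reference, the removed SentryConfigTool is driven through its nested CommandImpl entry point. A minimal sketch of an equivalent programmatic invocation, assuming the sentry-binding-hive jar is on the classpath and using hypothetical paths for the -s and -i arguments:

    import org.apache.sentry.binding.hive.authz.SentryConfigTool;

    public class ConfigToolExample {
      public static void main(String[] args) throws Exception {
        // Validate (-v) the policy file (-i) against the given sentry-site.xml (-s);
        // roughly equivalent to "sentry --command config-tool -v -s ... -i ...".
        new SentryConfigTool.CommandImpl().run(new String[] {
            "-v",
            "-s", "/etc/sentry/conf/sentry-site.xml",
            "-i", "/etc/sentry/policy.ini"
        });
      }
    }
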
http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
deleted file mode 100644
index 5a89af2..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
+++ /dev/null
@@ -1,269 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.binding.hive.conf;
-
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-public class HiveAuthzConf extends Configuration {
-
-  /**
-   * Configuration key used in hive-site.xml to point at sentry-site.xml
-   */
-  public static final String HIVE_ACCESS_CONF_URL = "hive.access.conf.url";
-  public static final String HIVE_SENTRY_CONF_URL = "hive.sentry.conf.url";
-  public static final String HIVE_ACCESS_SUBJECT_NAME = "hive.access.subject.name";
-  public static final String HIVE_SENTRY_SUBJECT_NAME = "hive.sentry.subject.name";
-  public static final String HIVE_SENTRY_AUTH_ERRORS = "sentry.hive.authorization.errors";
-  public static final String HIVE_SENTRY_MOCK_COMPILATION = "sentry.hive.mock.compilation";
-  public static final String HIVE_SENTRY_MOCK_ERROR = "sentry.hive.mock.error";
-  public static final String HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE = "No valid privileges";
-  /**
-   * Property used to persist the role set in the session. This is not public for now.
-   */
-  public static final String SENTRY_ACTIVE_ROLE_SET = "hive.sentry.active.role.set";
-
-  public static final String HIVE_SENTRY_SECURITY_COMMAND_WHITELIST =
-      "hive.sentry.security.command.whitelist";
-  public static final String HIVE_SENTRY_SECURITY_COMMAND_WHITELIST_DEFAULT =
-      "set,reset,reload";
-
-  public static final String HIVE_SENTRY_SERDE_WHITELIST = "hive.sentry.serde.whitelist";
-  public static final String HIVE_SENTRY_SERDE_WHITELIST_DEFAULT = "org.apache.hadoop.hive.serde2";
-
-  // Disable the serde URI privileges by default for backward compatibility.
-  public static final String HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED = "hive.sentry.turn.on.serde.uri.privileges";
-  public static final boolean HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED_DEFAULT = false;
-
-  public static final String HIVE_UDF_WHITE_LIST =
-      "concat,substr,substring,space,repeat,ascii,lpad,rpad,size,round,floor,sqrt,ceil," +
-          "ceiling,rand,abs,pmod,ln,log2,sin,asin,cos,acos,log10,log,exp,power,pow,sign,pi," +
-          "degrees,radians,atan,tan,e,conv,bin,hex,unhex,base64,unbase64,encode,decode,upper," +
-          "lower,ucase,lcase,trim,ltrim,rtrim,length,reverse,field,find_in_set,initcap,like," +
-          "rlike,regexp,regexp_replace,regexp_extract,parse_url,nvl,split,str_to_map,translate" +
-          ",positive,negative,day,dayofmonth,month,year,hour,minute,second,from_unixtime," +
-          "to_date,weekofyear,last_day,date_add,date_sub,datediff,add_months,get_json_object," +
-          "xpath_string,xpath_boolean,xpath_number,xpath_double,xpath_float,xpath_long," +
-          "xpath_int,xpath_short,xpath,+,-,*,/,%,div,&,|,^,~,current_database,isnull," +
-          "isnotnull,if,in,and,or,=,==,<=>,!=,<>,<,<=,>,>=,not,!,between,ewah_bitmap_and," +
-          "ewah_bitmap_or,ewah_bitmap_empty,boolean,tinyint,smallint,int,bigint,float,double," +
-          "string,date,timestamp,binary,decimal,varchar,char,max,min,sum,count,avg,std,stddev," +
-          "stddev_pop,stddev_samp,variance,var_pop,var_samp,covar_pop,covar_samp,corr," +
-          "histogram_numeric,percentile_approx,collect_set,collect_list,ngrams," +
-          "context_ngrams,ewah_bitmap,compute_stats,percentile," +
-          "array,assert_true,map,struct,named_struct,create_union,case,when,hash,coalesce," +
-          "index,in_file,instr,locate,elt,concat_ws,sort_array," +
-          "array_contains,sentences,map_keys,map_values,format_number,printf,greatest,least," +
-          "from_utc_timestamp,to_utc_timestamp,unix_timestamp,to_unix_timestamp,explode," +
-          "inline,json_tuple,parse_url_tuple,posexplode,stack,lead,lag,row_number,rank," +
-          "dense_rank,percent_rank,cume_dist,ntile,first_value,last_value,noop,noopwithmap," +
-          "noopstreaming,noopwithmapstreaming,windowingtablefunction,matchpath";
-
-  public static final String HIVE_UDF_BLACK_LIST = "reflect,reflect2,java_method";
-
-  /**
-   * Config setting definitions
-   */
-  public static enum AuthzConfVars {
-    AUTHZ_PROVIDER("sentry.provider",
-      "org.apache.sentry.provider.common.HadoopGroupResourceAuthorizationProvider"),
-    AUTHZ_PROVIDER_RESOURCE("sentry.hive.provider.resource", ""),
-    AUTHZ_PROVIDER_BACKEND("sentry.hive.provider.backend", "org.apache.sentry.provider.file.SimpleFileProviderBackend"),
-    AUTHZ_POLICY_ENGINE("sentry.hive.policy.engine", "org.apache.sentry.policy.db.SimpleDBPolicyEngine"),
-    AUTHZ_POLICY_FILE_FORMATTER(
-        "sentry.hive.policy.file.formatter",
-        "org.apache.sentry.binding.hive.SentryIniPolicyFileFormatter"),
-    AUTHZ_SERVER_NAME("sentry.hive.server", ""),
-    AUTHZ_RESTRICT_DEFAULT_DB("sentry.hive.restrict.defaultDB", "false"),
-    SENTRY_TESTING_MODE("sentry.hive.testing.mode", "false"),
-    AUTHZ_ALLOW_HIVE_IMPERSONATION("sentry.hive.allow.hive.impersonation", "false"),
-    AUTHZ_ONFAILURE_HOOKS("sentry.hive.failure.hooks", ""),
-    AUTHZ_METASTORE_SERVICE_USERS("sentry.metastore.service.users", null),
-    AUTHZ_SYNC_ALTER_WITH_POLICY_STORE("sentry.hive.sync.alter", "true"),
-    AUTHZ_SYNC_CREATE_WITH_POLICY_STORE("sentry.hive.sync.create", "false"),
-    AUTHZ_SYNC_DROP_WITH_POLICY_STORE("sentry.hive.sync.drop", "true"),
-
-    AUTHZ_PROVIDER_DEPRECATED("hive.sentry.provider",
-      "org.apache.sentry.provider.file.ResourceAuthorizationProvider"),
-    AUTHZ_PROVIDER_RESOURCE_DEPRECATED("hive.sentry.provider.resource", ""),
-    AUTHZ_SERVER_NAME_DEPRECATED("hive.sentry.server", ""),
-    AUTHZ_RESTRICT_DEFAULT_DB_DEPRECATED("hive.sentry.restrict.defaultDB", "false"),
-    SENTRY_TESTING_MODE_DEPRECATED("hive.sentry.testing.mode", "false"),
-    AUTHZ_ALLOW_HIVE_IMPERSONATION_DEPRECATED("hive.sentry.allow.hive.impersonation", "false"),
-    AUTHZ_ONFAILURE_HOOKS_DEPRECATED("hive.sentry.failure.hooks", "");
-
-    private final String varName;
-    private final String defaultVal;
-
-    AuthzConfVars(String varName, String defaultVal) {
-      this.varName = varName;
-      this.defaultVal = defaultVal;
-    }
-
-    public String getVar() {
-      return varName;
-    }
-
-    public String getDefault() {
-      return defaultVal;
-    }
-
-    public static String getDefault(String varName) {
-      for (AuthzConfVars oneVar : AuthzConfVars.values()) {
-        if(oneVar.getVar().equalsIgnoreCase(varName)) {
-          return oneVar.getDefault();
-        }
-      }
-      return null;
-    }
-  }
-
-  // map of current property names -> deprecated property names.
-  // The binding layer code should work if the deprecated property names are provided,
-  // as long as the new property names aren't also provided.  Since the binding code
-  // only calls the new property names, we require a map from current names to deprecated
-  // names in order to check if the deprecated name of a property was set.
-  private static final Map<String, AuthzConfVars> currentToDeprecatedProps =
-      new HashMap<String, AuthzConfVars>();
-  static {
-    currentToDeprecatedProps.put(AuthzConfVars.AUTHZ_PROVIDER.getVar(), AuthzConfVars.AUTHZ_PROVIDER_DEPRECATED);
-    currentToDeprecatedProps.put(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar(), AuthzConfVars.AUTHZ_PROVIDER_RESOURCE_DEPRECATED);
-    currentToDeprecatedProps.put(AuthzConfVars.AUTHZ_SERVER_NAME.getVar(), AuthzConfVars.AUTHZ_SERVER_NAME_DEPRECATED);
-    currentToDeprecatedProps.put(AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB_DEPRECATED);
-    currentToDeprecatedProps.put(AuthzConfVars.SENTRY_TESTING_MODE.getVar(), AuthzConfVars.SENTRY_TESTING_MODE_DEPRECATED);
-    currentToDeprecatedProps.put(AuthzConfVars.AUTHZ_ALLOW_HIVE_IMPERSONATION.getVar(), AuthzConfVars.AUTHZ_ALLOW_HIVE_IMPERSONATION_DEPRECATED);
-    currentToDeprecatedProps.put(AuthzConfVars.AUTHZ_ONFAILURE_HOOKS.getVar(), AuthzConfVars.AUTHZ_ONFAILURE_HOOKS_DEPRECATED);
-  };
-
-  private static final Logger LOG = LoggerFactory
-      .getLogger(HiveAuthzConf.class);
-  public static final String AUTHZ_SITE_FILE = "sentry-site.xml";
-  private final String hiveAuthzSiteFile;
-
-  public HiveAuthzConf(URL hiveAuthzSiteURL) {
-    super();
-    LOG.info("DefaultFS: " + super.get("fs.defaultFS"));
-    addResource(hiveAuthzSiteURL);
-    applySystemProperties();
-    LOG.info("DefaultFS: " + super.get("fs.defaultFS"));
-    this.hiveAuthzSiteFile = hiveAuthzSiteURL.toString();
-  }
-  /**
-   * Apply system properties to this object if the property name is defined in
-   * AuthzConfVars and the value is non-null and not an empty string.
-   */
-  private void applySystemProperties() {
-    Map<String, String> systemProperties = getConfSystemProperties();
-    for (Entry<String, String> systemProperty : systemProperties.entrySet()) {
-      this.set(systemProperty.getKey(), systemProperty.getValue());
-    }
-  }
-
-  /**
-   * This method returns a mapping from config variable name to its value for all config variables
-   * which have been set using System properties
-   */
-  public static Map<String, String> getConfSystemProperties() {
-    Map<String, String> systemProperties = new HashMap<String, String>();
-
-    for (AuthzConfVars oneVar : AuthzConfVars.values()) {
-      String value = System.getProperty(oneVar.getVar());
-      if (value != null && value.length() > 0) {
-        systemProperties.put(oneVar.getVar(), value);
-      }
-    }
-    return systemProperties;
-  }
-
-  @Override
-  public String get(String varName) {
-    return get(varName, null);
-  }
-
-  @Override
-  public String get(String varName, String defaultVal) {
-    String retVal = super.get(varName);
-    if (retVal == null) {
-      // check if the deprecated value is set here
-      if (currentToDeprecatedProps.containsKey(varName)) {
-        retVal = super.get(currentToDeprecatedProps.get(varName).getVar());
-      }
-      if (retVal == null) {
-        retVal = AuthzConfVars.getDefault(varName);
-      } else {
-        LOG.warn("Using the deprecated config setting " + currentToDeprecatedProps.get(varName).getVar() +
-            " instead of " + varName);
-      }
-    }
-    if (retVal == null) {
-      retVal = defaultVal;
-    }
-    return retVal;
-  }
-
-  public String getHiveAuthzSiteFile() {
-    return hiveAuthzSiteFile;
-  }
-
-  /**
-   * Extract the authz config file path from the given Hive conf and load the authz config.
-   * @param hiveConf the active Hive configuration
-   * @return the loaded HiveAuthzConf
-   * @throws IllegalArgumentException if the config URL is missing or malformed
-   */
-  public static HiveAuthzConf getAuthzConf(HiveConf hiveConf)
-    throws IllegalArgumentException {
-    boolean deprecatedConfigFile = false;
-
-    String hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
-    if (hiveAuthzConf == null
-        || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
-      hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_ACCESS_CONF_URL);
-      deprecatedConfigFile = true;
-    }
-
-    if (hiveAuthzConf == null
-        || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
-      throw new IllegalArgumentException("Configuration key "
-          + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf
-          + "' is invalid.");
-    }
-
-    try {
-      return new HiveAuthzConf(new URL(hiveAuthzConf));
-    } catch (MalformedURLException e) {
-      if (deprecatedConfigFile) {
-        throw new IllegalArgumentException("Configuration key "
-            + HiveAuthzConf.HIVE_ACCESS_CONF_URL
-            + " specifies a malformed URL '" + hiveAuthzConf + "'", e);
-      } else {
-        throw new IllegalArgumentException("Configuration key "
-            + HiveAuthzConf.HIVE_SENTRY_CONF_URL
-            + " specifies a malformed URL '" + hiveAuthzConf + "'", e);
-      }
-    }
-  }
-}

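The get() override above resolves a key in a fixed order: the current property name, then its deprecated alias from currentToDeprecatedProps (with a warning), then the AuthzConfVars default, and finally the caller-supplied default. A short sketch of that lookup, assuming a hypothetical local sentry-site.xml URL:

    import java.net.URL;

    import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
    import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;

    public class AuthzConfExample {
      public static void main(String[] args) throws Exception {
        HiveAuthzConf conf =
            new HiveAuthzConf(new URL("file:///etc/sentry/conf/sentry-site.xml"));
        // Reads "sentry.hive.server"; if unset, falls back to the deprecated
        // "hive.sentry.server", then to the enum default ("").
        String server = conf.get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar());
        System.out.println("Sentry server: " + server);
      }
    }
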
http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/InvalidConfigurationException.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/InvalidConfigurationException.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/InvalidConfigurationException.java
deleted file mode 100644
index b658922..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/InvalidConfigurationException.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.binding.hive.conf;
-
-public class InvalidConfigurationException extends Exception {
-  private static final long serialVersionUID = 1L;
-
-  // Parameterless constructor
-  public InvalidConfigurationException() {}
-
-  // Constructor that accepts a message
-  public InvalidConfigurationException(String message) {
-    super(message);
-  }
-}

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java
index 37781b9..9e08571 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java
@@ -40,7 +40,7 @@ import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.shims.Utils;
-import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
+import org.apache.sentry.binding.hive.HiveAuthzBindingHookBase;
 import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
 import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
 import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
@@ -285,7 +285,7 @@ public class AuthorizingObjectStore extends ObjectStore {
       throws MetaException {
     if (needsAuthorization(getUserName())) {
       try {
-        return HiveAuthzBindingHook.filterShowDatabases(getHiveAuthzBinding(),
+        return HiveAuthzBindingHookBase.filterShowDatabases(getHiveAuthzBinding(),
             dbList, HiveOperation.SHOWDATABASES, getUserName());
       } catch (SemanticException e) {
         throw new MetaException("Error getting DB list " + e.getMessage());
@@ -306,7 +306,7 @@ public class AuthorizingObjectStore extends ObjectStore {
       throws MetaException {
     if (needsAuthorization(getUserName())) {
       try {
-        return HiveAuthzBindingHook.filterShowTables(getHiveAuthzBinding(),
+        return HiveAuthzBindingHookBase.filterShowTables(getHiveAuthzBinding(),
             tabList, HiveOperation.SHOWTABLES, getUserName(), dbName);
       } catch (SemanticException e) {
         throw new MetaException("Error getting Table list " + e.getMessage());

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBinding.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBinding.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBinding.java
index 14c31a4..d741c44 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBinding.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBinding.java
@@ -17,52 +17,20 @@
  */
 package org.apache.sentry.binding.metastore;
 
-import java.io.File;
 import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.util.ArrayList;
 import java.util.List;
-import java.util.Set;
 
 import javax.security.auth.login.LoginException;
 
-import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStorePreEventListener;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.events.PreAddPartitionEvent;
-import org.apache.hadoop.hive.metastore.events.PreAlterPartitionEvent;
-import org.apache.hadoop.hive.metastore.events.PreAlterTableEvent;
-import org.apache.hadoop.hive.metastore.events.PreCreateTableEvent;
-import org.apache.hadoop.hive.metastore.events.PreDropDatabaseEvent;
-import org.apache.hadoop.hive.metastore.events.PreDropPartitionEvent;
-import org.apache.hadoop.hive.metastore.events.PreDropTableEvent;
-import org.apache.hadoop.hive.metastore.events.PreEventContext;
 import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.hadoop.hive.shims.Utils;
 import org.apache.sentry.SentryUserException;
 import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
 import org.apache.sentry.binding.hive.authz.HiveAuthzPrivilegesMap;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
 import org.apache.sentry.core.common.Subject;
-import org.apache.sentry.core.common.utils.PathUtils;
-import org.apache.sentry.core.model.db.AccessURI;
 import org.apache.sentry.core.model.db.DBModelAuthorizable;
-import org.apache.sentry.core.model.db.Database;
-import org.apache.sentry.core.model.db.Server;
-import org.apache.sentry.core.model.db.Table;
-
-import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Sets;
 
 /**
  * Sentry binding for Hive Metastore. The binding is integrated into Metastore
@@ -73,334 +41,16 @@ import com.google.common.collect.Sets;
  * passed down to the hive binding which handles the authorization. This ensures
  * that we follow the same privilege model and policies.
  */
-public class MetastoreAuthzBinding extends MetaStorePreEventListener {
-
-  /**
-   * Build the set of object hierarchies, i.e. fully qualified DB model objects
-   */
-  protected static class HierarcyBuilder {
-    private List<List<DBModelAuthorizable>> authHierarchy;
-
-    public HierarcyBuilder() {
-      authHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-    }
-
-    public HierarcyBuilder addServerToOutput(Server server) {
-      List<DBModelAuthorizable> serverHierarchy = new ArrayList<DBModelAuthorizable>();
-      serverHierarchy.add(server);
-      authHierarchy.add(serverHierarchy);
-      return this;
-    }
-
-    public HierarcyBuilder addDbToOutput(Server server, String dbName) {
-      List<DBModelAuthorizable> dbHierarchy = new ArrayList<DBModelAuthorizable>();
-      addServerToOutput(server);
-      dbHierarchy.add(server);
-      dbHierarchy.add(new Database(dbName));
-      authHierarchy.add(dbHierarchy);
-      return this;
-    }
-
-    public HierarcyBuilder addUriToOutput(Server server, String uriPath,
-        String warehouseDirPath) throws MetaException {
-      List<DBModelAuthorizable> uriHierarchy = new ArrayList<DBModelAuthorizable>();
-      addServerToOutput(server);
-      uriHierarchy.add(server);
-      try {
-        uriHierarchy.add(new AccessURI(PathUtils.parseDFSURI(warehouseDirPath,
-            uriPath)));
-      } catch (URISyntaxException e) {
-        throw new MetaException("Error paring the URI " + e.getMessage());
-      }
-      authHierarchy.add(uriHierarchy);
-      return this;
-    }
-
-    public HierarcyBuilder addTableToOutput(Server server, String dbName,
-        String tableName) {
-      List<DBModelAuthorizable> tableHierarchy = new ArrayList<DBModelAuthorizable>();
-      addDbToOutput(server, dbName);
-      tableHierarchy.add(server);
-      tableHierarchy.add(new Database(dbName));
-      tableHierarchy.add(new Table(tableName));
-      authHierarchy.add(tableHierarchy);
-      return this;
-    }
-
-    public List<List<DBModelAuthorizable>> build() {
-      return authHierarchy;
-    }
-  }
-
-  private HiveAuthzConf authzConf;
-  private final Server authServer;
-  private final HiveConf hiveConf;
-  private final ImmutableSet<String> serviceUsers;
-  private HiveAuthzBinding hiveAuthzBinding;
-  private final String warehouseDir;
-  private static boolean sentryCacheOutOfSync = false;
+public class MetastoreAuthzBinding extends MetastoreAuthzBindingBase {
 
   public MetastoreAuthzBinding(Configuration config) throws Exception {
     super(config);
-    String hiveAuthzConf = config.get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
-    if (hiveAuthzConf == null
-        || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
-      throw new IllegalArgumentException("Configuration key "
-          + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf
-          + "' is invalid.");
-    }
-    try {
-      authzConf = new HiveAuthzConf(new URL(hiveAuthzConf));
-    } catch (MalformedURLException e) {
-      throw new IllegalArgumentException("Configuration key "
-          + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " specifies a malformed URL '"
-          + hiveAuthzConf + "'", e);
-    }
-    hiveConf = new HiveConf(config, this.getClass());
-    this.authServer = new Server(authzConf.get(AuthzConfVars.AUTHZ_SERVER_NAME
-        .getVar()));
-    serviceUsers = ImmutableSet.copyOf(toTrimedLower(Sets.newHashSet(authzConf
-        .getStrings(AuthzConfVars.AUTHZ_METASTORE_SERVICE_USERS.getVar(),
-            new String[] { "" }))));
-    warehouseDir = hiveConf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE);
-
   }
 
-  /**
-   * Main listener callback which is the entry point for Sentry
-   */
   @Override
-  public void onEvent(PreEventContext context) throws MetaException,
-      NoSuchObjectException, InvalidOperationException {
-
-    if (!needsAuthorization(getUserName())) {
-      return;
-    }
-    switch (context.getEventType()) {
-    case CREATE_TABLE:
-      authorizeCreateTable((PreCreateTableEvent) context);
-      break;
-    case DROP_TABLE:
-      authorizeDropTable((PreDropTableEvent) context);
-      break;
-    case ALTER_TABLE:
-      authorizeAlterTable((PreAlterTableEvent) context);
-      break;
-    case ADD_PARTITION:
-      authorizeAddPartition((PreAddPartitionEvent) context);
-      break;
-    case DROP_PARTITION:
-      authorizeDropPartition((PreDropPartitionEvent) context);
-      break;
-    case ALTER_PARTITION:
-      authorizeAlterPartition((PreAlterPartitionEvent) context);
-      break;
-    case CREATE_DATABASE:
-      authorizeCreateDatabase();
-      break;
-    case DROP_DATABASE:
-      authorizeDropDatabase((PreDropDatabaseEvent) context);
-      break;
-    case LOAD_PARTITION_DONE:
-      // noop for now
-      break;
-    default:
-      break;
-    }
-  }
-
-  private void authorizeCreateDatabase()
-      throws InvalidOperationException, MetaException {
-    authorizeMetastoreAccess(HiveOperation.CREATEDATABASE,
-        new HierarcyBuilder().addServerToOutput(getAuthServer()).build(),
-        new HierarcyBuilder().addServerToOutput(getAuthServer()).build());
-  }
-
-  private void authorizeDropDatabase(PreDropDatabaseEvent context)
-      throws InvalidOperationException, MetaException {
-    authorizeMetastoreAccess(HiveOperation.DROPDATABASE,
-        new HierarcyBuilder().addDbToOutput(getAuthServer(),
-            context.getDatabase().getName()).build(),
-        new HierarcyBuilder().addDbToOutput(getAuthServer(),
-            context.getDatabase().getName()).build());
-  }
-
-  private void authorizeCreateTable(PreCreateTableEvent context)
-      throws InvalidOperationException, MetaException {
-    HierarcyBuilder inputBuilder = new HierarcyBuilder();
-    inputBuilder.addDbToOutput(getAuthServer(), context.getTable().getDbName());
-    HierarcyBuilder outputBuilder = new HierarcyBuilder();
-    outputBuilder.addDbToOutput(getAuthServer(), context.getTable().getDbName());
-
-    if (!StringUtils.isEmpty(context.getTable().getSd().getLocation())) {
-      String uriPath;
-      try {
-        uriPath = PathUtils.parseDFSURI(warehouseDir,
-            getSdLocation(context.getTable().getSd()));
-      } catch(URISyntaxException e) {
-        throw new MetaException(e.getMessage());
-      }
-      inputBuilder.addUriToOutput(getAuthServer(), uriPath, warehouseDir);
-    }
-    authorizeMetastoreAccess(HiveOperation.CREATETABLE, inputBuilder.build(),
-        outputBuilder.build());
-  }
-
-  private void authorizeDropTable(PreDropTableEvent context)
-      throws InvalidOperationException, MetaException {
-    authorizeMetastoreAccess(
-        HiveOperation.DROPTABLE,
-        new HierarcyBuilder().addTableToOutput(getAuthServer(),
-            context.getTable().getDbName(), context.getTable().getTableName())
-            .build(),
-        new HierarcyBuilder().addTableToOutput(getAuthServer(),
-            context.getTable().getDbName(), context.getTable().getTableName())
-            .build());
-  }
-
-  private void authorizeAlterTable(PreAlterTableEvent context)
-      throws InvalidOperationException, MetaException {
-    /*
-     * There are multiple alter table options and it's tricky to figure out
-     * which is attempted here. Currently every alter table variant needs a full
-     * table-level privilege, except for setting the location, which also needs
-     * a privilege on the URI. Hence we initially set the operation to
-     * ALTERTABLE_ADDCOLS. If the client has specified the location, then change
-     * to ALTERTABLE_LOCATION.
-     */
-    HiveOperation operation = HiveOperation.ALTERTABLE_ADDCOLS;
-    HierarcyBuilder inputBuilder = new HierarcyBuilder();
-    inputBuilder.addTableToOutput(getAuthServer(), context.getOldTable()
-        .getDbName(), context.getOldTable().getTableName());
-    HierarcyBuilder outputBuilder = new HierarcyBuilder();
-    outputBuilder.addTableToOutput(getAuthServer(), context.getOldTable()
-        .getDbName(), context.getOldTable().getTableName());
-
-    // if the operation requires location change, then add URI privilege check
-    String oldLocationUri;
-    String newLocationUri;
-    try {
-      oldLocationUri = PathUtils.parseDFSURI(warehouseDir,
-          getSdLocation(context.getOldTable().getSd()));
-      newLocationUri = PathUtils.parseDFSURI(warehouseDir,
-          getSdLocation(context.getNewTable().getSd()));
-    } catch (URISyntaxException e) {
-      throw new MetaException(e.getMessage());
-    }
-    if (oldLocationUri.compareTo(newLocationUri) != 0) {
-      outputBuilder.addUriToOutput(getAuthServer(), newLocationUri,
-          warehouseDir);
-      operation = HiveOperation.ALTERTABLE_LOCATION;
-    }
-    authorizeMetastoreAccess(
-        operation,
-        inputBuilder.build(), outputBuilder.build());
-
-  }
-
-  private void authorizeAddPartition(PreAddPartitionEvent context)
-      throws InvalidOperationException, MetaException, NoSuchObjectException {
-    for (Partition mapiPart : context.getPartitions()) {
-      HierarcyBuilder inputBuilder = new HierarcyBuilder();
-      inputBuilder.addTableToOutput(getAuthServer(), mapiPart
-          .getDbName(), mapiPart.getTableName());
-      HierarcyBuilder outputBuilder = new HierarcyBuilder();
-      outputBuilder.addTableToOutput(getAuthServer(), mapiPart
-          .getDbName(), mapiPart.getTableName());
-      // check if we need to validate URI permissions when the storage location
-      // is non-default, i.e. something not under the parent table directory
-
-      String partitionLocation = null;
-      if (mapiPart.isSetSd()) {
-        partitionLocation = mapiPart.getSd().getLocation();
-      }
-      if (!StringUtils.isEmpty(partitionLocation)) {
-        String tableLocation = context
-            .getHandler()
-            .get_table(mapiPart.getDbName(),
-                mapiPart.getTableName()).getSd().getLocation();
-        String uriPath;
-        try {
-          uriPath = PathUtils.parseDFSURI(warehouseDir, mapiPart
-              .getSd().getLocation());
-        } catch (URISyntaxException e) {
-          throw new MetaException(e.getMessage());
-        }
-        if (!partitionLocation.equals(tableLocation) &&
-            !partitionLocation.startsWith(tableLocation + File.separator)) {
-          outputBuilder.addUriToOutput(getAuthServer(), uriPath, warehouseDir);
-        }
-      }
-      authorizeMetastoreAccess(HiveOperation.ALTERTABLE_ADDPARTS,
-          inputBuilder.build(), outputBuilder.build());
-    }
-  }
-
-  protected void authorizeDropPartition(PreDropPartitionEvent context)
-      throws InvalidOperationException, MetaException {
-    authorizeMetastoreAccess(
-        HiveOperation.ALTERTABLE_DROPPARTS,
-        new HierarcyBuilder().addTableToOutput(getAuthServer(),
-            context.getPartition().getDbName(),
-            context.getPartition().getTableName()).build(),
-        new HierarcyBuilder().addTableToOutput(getAuthServer(),
-            context.getPartition().getDbName(),
-            context.getPartition().getTableName()).build());
-  }
-
-  private void authorizeAlterPartition(PreAlterPartitionEvent context)
-      throws InvalidOperationException, MetaException, NoSuchObjectException {
-    /*
-     * There are multiple alter partition options and it's tricky to figure out
-     * which is attempted here. Currently every alter partition variant needs a
-     * full table-level privilege, except for setting the location, which also
-     * needs a privilege on the URI. Currently we don't try to distinguish the
-     * operation type; all alter partitions are treated as set-location.
-     */
-    HierarcyBuilder inputBuilder = new HierarcyBuilder().addTableToOutput(
-        getAuthServer(), context.getDbName(), context.getTableName());
-    HierarcyBuilder outputBuilder = new HierarcyBuilder().addTableToOutput(
-        getAuthServer(), context.getDbName(), context.getTableName());
-
-    Partition partition = context.getNewPartition();
-    String partitionLocation = getSdLocation(partition.getSd());
-    if (!StringUtils.isEmpty(partitionLocation)) {
-      String tableLocation = context.getHandler().get_table(
-          partition.getDbName(), partition.getTableName()).getSd().getLocation();
-
-      String uriPath;
-      try {
-        uriPath = PathUtils.parseDFSURI(warehouseDir, partitionLocation);
-      } catch (URISyntaxException e) {
-        throw new MetaException(e.getMessage());
-      }
-      if (!partitionLocation.startsWith(tableLocation + File.separator)) {
-        outputBuilder.addUriToOutput(getAuthServer(), uriPath, warehouseDir);
-      }
-    }
-    authorizeMetastoreAccess(
-        HiveOperation.ALTERPARTITION_LOCATION,
-        inputBuilder.build(), outputBuilder.build());
-  }
-
-  private InvalidOperationException invalidOperationException(Exception e) {
-    InvalidOperationException ex = new InvalidOperationException(e.getMessage());
-    ex.initCause(e.getCause());
-    return ex;
-  }
-
-  /**
-   * Assemble the required privileges and requested privileges. Validate using
-   * Hive bind auth provider
-   * @param hiveOp
-   * @param inputHierarchy
-   * @param outputHierarchy
-   * @throws InvalidOperationException
-   */
   protected void authorizeMetastoreAccess(HiveOperation hiveOp,
       List<List<DBModelAuthorizable>> inputHierarchy,
-      List<List<DBModelAuthorizable>> outputHierarchy)
-      throws InvalidOperationException {
+      List<List<DBModelAuthorizable>> outputHierarchy) throws InvalidOperationException {
     if (isSentryCacheOutOfSync()) {
       throw invalidOperationException(new SentryUserException(
           "Metastore/Sentry cache is out of sync"));
@@ -419,56 +69,6 @@ public class MetastoreAuthzBinding extends MetaStorePreEventListener {
     } catch (Exception e) {
       throw invalidOperationException(e);
     }
-
-  }
-
-  public Server getAuthServer() {
-    return authServer;
-  }
-
-  private boolean needsAuthorization(String userName) {
-    return !serviceUsers.contains(userName);
-  }
-
-  private static Set<String> toTrimedLower(Set<String> s) {
-    Set<String> result = Sets.newHashSet();
-    for (String v : s) {
-      result.add(v.trim().toLowerCase());
-    }
-    return result;
-  }
-
-  private HiveAuthzBinding getHiveAuthzBinding() throws Exception {
-    if (hiveAuthzBinding == null) {
-      hiveAuthzBinding = new HiveAuthzBinding(HiveAuthzBinding.HiveHook.HiveMetaStore, hiveConf, authzConf);
-    }
-    return hiveAuthzBinding;
-  }
-
-  private String getUserName() throws MetaException {
-    try {
-      return Utils.getUGI().getShortUserName();
-    } catch (LoginException e) {
-      throw new MetaException("Failed to get username " + e.getMessage());
-    } catch (IOException e) {
-      throw new MetaException("Failed to get username " + e.getMessage());
-    }
-  }
-
-  private String getSdLocation(StorageDescriptor sd) {
-    if (sd == null) {
-      return "";
-    } else {
-      return sd.getLocation();
-    }
-  }
-
-  public static boolean isSentryCacheOutOfSync() {
-    return sentryCacheOutOfSync;
-  }
-
-  public static void setSentryCacheOutOfSync(boolean sentryCacheOutOfSync) {
-    MetastoreAuthzBinding.sentryCacheOutOfSync = sentryCacheOutOfSync;
   }
 
 }

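For reviewers tracing the deleted partition logic above: the URI authorizable is only added when a partition's storage location falls outside its table's directory. A minimal standalone sketch of that containment test (class and method names here are illustrative, not part of this commit):

    import java.io.File;

    public class PartitionLocationCheck {
      // True when the partition lives outside the table directory, the
      // case where MetastoreAuthzBinding added a URI authorizable above.
      static boolean needsUriPrivilege(String partitionLocation, String tableLocation) {
        if (partitionLocation == null || partitionLocation.isEmpty()) {
          return false; // default location under the table; no URI check needed
        }
        return !partitionLocation.equals(tableLocation)
            && !partitionLocation.startsWith(tableLocation + File.separator);
      }

      public static void main(String[] args) {
        String table = "hdfs://nn/warehouse/t";
        System.out.println(needsUriPrivilege(table + "/part=1", table));      // false
        System.out.println(needsUriPrivilege("hdfs://nn/ext/part=1", table)); // true
      }
    }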
http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryHiveMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryHiveMetaStoreClient.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryHiveMetaStoreClient.java
deleted file mode 100644
index 0330db9..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryHiveMetaStoreClient.java
+++ /dev/null
@@ -1,161 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.binding.metastore;
-
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.List;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaHookLoader;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.IMetaStoreClient;
-import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.UnknownDBException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
-import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
-import org.apache.thrift.TException;
-
-public class SentryHiveMetaStoreClient extends HiveMetaStoreClient implements
-    IMetaStoreClient {
-
-  private HiveAuthzBinding hiveAuthzBinding;
-  private HiveAuthzConf authzConf;
-
-  public SentryHiveMetaStoreClient(HiveConf conf) throws MetaException {
-    super(conf);
-  }
-
-  public SentryHiveMetaStoreClient(HiveConf conf, HiveMetaHookLoader hookLoader)
-      throws MetaException {
-    super(conf, hookLoader);
-  }
-
-  @Override
-  public List<String> getDatabases(String databasePattern) throws MetaException {
-    return filterDatabases(super.getDatabases(databasePattern));
-  }
-
-  @Override
-  public List<String> getAllDatabases() throws MetaException {
-    return filterDatabases(super.getAllDatabases());
-  }
-
-  @Override
-  public List<String> getTables(String dbName, String tablePattern)
-      throws MetaException {
-    return filterTables(dbName, super.getTables(dbName, tablePattern));
-  }
-
-  @Override
-  public List<String> getAllTables(String dbName) throws MetaException {
-    return filterTables(dbName, super.getAllTables(dbName));
-  }
-
-  @Override
-  public List<String> listTableNamesByFilter(String dbName, String filter,
-      short maxTables) throws InvalidOperationException, UnknownDBException,
-      TException {
-    return filterTables(dbName,
-        super.listTableNamesByFilter(dbName, filter, maxTables));
-  }
-
-  /**
-   * Invoke Hive database filtering that removes the entries the user has no
-   * privileges to access
-   * 
-   * @param dbList
-   * @return
-   * @throws MetaException
-   */
-  private List<String> filterDatabases(List<String> dbList)
-      throws MetaException {
-    try {
-      return HiveAuthzBindingHook.filterShowDatabases(getHiveAuthzBinding(),
-          dbList, HiveOperation.SHOWDATABASES, getUserName());
-    } catch (SemanticException e) {
-      throw new MetaException("Error getting DB list " + e.getMessage());
-    }
-  }
-
-  /**
-   * Invoke Hive table filtering that removes the entries the user has no
-   * privileges to access
-   * 
-   * @param dbName
-   * @param tabList
-   * @return
-   * @throws MetaException
-   */
-  private List<String> filterTables(String dbName, List<String> tabList)
-      throws MetaException {
-    try {
-      return HiveAuthzBindingHook.filterShowTables(getHiveAuthzBinding(),
-          tabList, HiveOperation.SHOWTABLES, getUserName(), dbName);
-    } catch (SemanticException e) {
-      throw new MetaException("Error getting Table list " + e.getMessage());
-    }
-  }
-
-  private String getUserName() {
-    return getConf().get(HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME);
-  }
-
-  /**
-   * load Hive auth provider
-   * 
-   * @return
-   * @throws MetaException
-   */
-  private HiveAuthzBinding getHiveAuthzBinding() throws MetaException {
-    if (hiveAuthzBinding == null) {
-      String hiveAuthzConf = getConf().get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
-      if (hiveAuthzConf == null
-          || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
-        throw new MetaException("Configuration key "
-            + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf
-            + "' is invalid.");
-      }
-      try {
-        authzConf = new HiveAuthzConf(new URL(hiveAuthzConf));
-      } catch (MalformedURLException e) {
-        throw new MetaException("Configuration key "
-            + HiveAuthzConf.HIVE_SENTRY_CONF_URL
-            + " specifies a malformed URL '" + hiveAuthzConf + "' "
-            + e.getMessage());
-      }
-      try {
-        hiveAuthzBinding = new HiveAuthzBinding(
-            HiveAuthzBinding.HiveHook.HiveMetaStore, getConf(), authzConf);
-      } catch (Exception e) {
-        throw new MetaException("Failed to load Hive binding " + e.getMessage());
-      }
-    }
-    return hiveAuthzBinding;
-  }
-
-  private HiveConf getConf() {
-    return SessionState.get().getConf();
-  }
-
-}

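The client above is a straightforward wrap-and-filter: each listing method delegates to HiveMetaStoreClient and then strips the names the user cannot see. A self-contained sketch of that shape, with a hypothetical predicate standing in for filterShowDatabases:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.function.Predicate;

    public class FilteringClientSketch {
      private final Predicate<String> visible;

      FilteringClientSketch(Predicate<String> visible) { this.visible = visible; }

      // Stand-in for super.getAllDatabases(); a real subclass delegates
      // to the metastore here.
      List<String> fetchAllDatabases() {
        return Arrays.asList("default", "finance", "hr");
      }

      public List<String> getAllDatabases() {
        List<String> result = new ArrayList<>();
        for (String db : fetchAllDatabases()) {
          if (visible.test(db)) { // per-name privilege check
            result.add(db);
          }
        }
        return result;
      }

      public static void main(String[] args) {
        FilteringClientSketch c = new FilteringClientSketch(db -> !db.equals("finance"));
        System.out.println(c.getAllDatabases()); // [default, hr]
      }
    }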
http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java
deleted file mode 100644
index b551788..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java
+++ /dev/null
@@ -1,201 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.binding.metastore;
-
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreFilterHook;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.Index;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.PartitionSpec;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
-import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
-
-public class SentryMetaStoreFilterHook implements MetaStoreFilterHook {
-
-  static final protected Log LOG = LogFactory.getLog(SentryMetaStoreFilterHook.class);
-
-  private HiveAuthzBinding hiveAuthzBinding;
-  private HiveAuthzConf authzConf;
-
-  public SentryMetaStoreFilterHook(HiveConf hiveConf) { //NOPMD
-  }
-
-  @Override
-  public List<String> filterDatabases(List<String> dbList) {
-    return filterDb(dbList);
-  }
-
-  @Override
-  public Database filterDatabase(Database dataBase)
-      throws NoSuchObjectException {
-    return dataBase;
-  }
-
-  @Override
-  public List<String> filterTableNames(String dbName, List<String> tableList) {
-    return filterTab(dbName, tableList);
-  }
-
-  @Override
-  public Table filterTable(Table table) throws NoSuchObjectException {
-    return table;
-  }
-
-  @Override
-  public List<Table> filterTables(List<Table> tableList) {
-    return tableList;
-  }
-
-  @Override
-  public List<Partition> filterPartitions(List<Partition> partitionList) {
-    return partitionList;
-  }
-
-  @Override
-  public List<PartitionSpec> filterPartitionSpecs(
-      List<PartitionSpec> partitionSpecList) {
-    return partitionSpecList;
-  }
-
-  @Override
-  public Partition filterPartition(Partition partition)
-      throws NoSuchObjectException {
-    return partition;
-  }
-
-  @Override
-  public List<String> filterPartitionNames(String dbName, String tblName,
-      List<String> partitionNames) {
-    return partitionNames;
-  }
-
-  @Override
-  public Index filterIndex(Index index) throws NoSuchObjectException {
-    return index;
-  }
-
-  @Override
-  public List<String> filterIndexNames(String dbName, String tblName,
-      List<String> indexList) {
-    return indexList;
-  }
-
-  @Override
-  public List<Index> filterIndexes(List<Index> indexeList) {
-    return indexeList;
-  }
-
-  /**
-   * Invoke Hive database filtering that removes the entries the user has no
-   * privileges to access
-   * @param dbList
-   * @return
-   * @throws MetaException
-   */
-  private List<String> filterDb(List<String> dbList) {
-    try {
-      return HiveAuthzBindingHook.filterShowDatabases(getHiveAuthzBinding(),
-          dbList, HiveOperation.SHOWDATABASES, getUserName());
-    } catch (Exception e) {
-      LOG.warn("Error getting DB list ", e);
-      return new ArrayList<String>();
-    } finally {
-      close();
-    }
-  }
-
-  /**
-   * Invoke Hive table filtering that removes the entries the user has no
-   * privileges to access
-   * @param tabList
-   * @return
-   * @throws MetaException
-   */
-  private List<String> filterTab(String dbName, List<String> tabList) {
-    try {
-      return HiveAuthzBindingHook.filterShowTables(getHiveAuthzBinding(),
-          tabList, HiveOperation.SHOWTABLES, getUserName(), dbName);
-    } catch (Exception e) {
-      LOG.warn("Error getting Table list ", e);
-      return new ArrayList<String>();
-    } finally {
-      close();
-    }
-  }
-
-  private String getUserName() {
-    return getConf().get(HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME);
-  }
-
-  /**
-   * load Hive auth provider
-   * @return
-   * @throws MetaException
-   */
-  private HiveAuthzBinding getHiveAuthzBinding() throws MetaException {
-    if (hiveAuthzBinding == null) {
-      String hiveAuthzConf = getConf().get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
-      if (hiveAuthzConf == null
-          || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
-        throw new MetaException("Configuration key "
-            + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf
-            + "' is invalid.");
-      }
-      try {
-        authzConf = new HiveAuthzConf(new URL(hiveAuthzConf));
-      } catch (MalformedURLException e) {
-        throw new MetaException("Configuration key "
-            + HiveAuthzConf.HIVE_SENTRY_CONF_URL
-            + " specifies a malformed URL '" + hiveAuthzConf + "' "
-            + e.getMessage());
-      }
-      try {
-        hiveAuthzBinding = new HiveAuthzBinding(
-            HiveAuthzBinding.HiveHook.HiveMetaStore, getConf(), authzConf);
-      } catch (Exception e) {
-        throw new MetaException("Failed to load Hive binding " + e.getMessage());
-      }
-    }
-    return hiveAuthzBinding;
-  }
-
-  private HiveConf getConf() {
-    return SessionState.get().getConf();
-  }
-
-  private void close() {
-    if (hiveAuthzBinding != null) {
-      hiveAuthzBinding.close();
-      hiveAuthzBinding = null;
-    }
-  }
-}

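One behavioral difference between the two deleted classes is worth noting: SentryHiveMetaStoreClient surfaces filter failures as MetaException, while this hook swallows them and returns an empty list. Both choices fail closed. A tiny sketch of the hook's variant (helper name hypothetical):

    import java.util.Collections;
    import java.util.List;
    import java.util.concurrent.Callable;

    public class FailClosedSketch {
      // If the privilege filter cannot run, show nothing rather than everything.
      static List<String> filterOrEmpty(Callable<List<String>> filtered) {
        try {
          return filtered.call();
        } catch (Exception e) {
          return Collections.emptyList(); // fail closed, mirroring filterDb/filterTab
        }
      }

      public static void main(String[] args) {
        System.out.println(filterOrEmpty(() -> {
          throw new IllegalStateException("binding unavailable");
        })); // []
      }
    }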
http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestURI.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestURI.java b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestURI.java
index 8b716c3..c7ac070 100644
--- a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestURI.java
+++ b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestURI.java
@@ -48,29 +48,29 @@ public class TestURI {
   @Test
   public void testParseURIIncorrectFilePrefix() throws SemanticException {
     Assert.assertEquals("file:///some/path",
-        HiveAuthzBindingHook.parseURI("file:/some/path").getName());
+        HiveAuthzBindingHookBase.parseURI("file:/some/path").getName());
   }
   @Test
   public void testParseURICorrectFilePrefix() throws SemanticException {
     Assert.assertEquals("file:///some/path",
-        HiveAuthzBindingHook.parseURI("file:///some/path").getName());
+        HiveAuthzBindingHookBase.parseURI("file:///some/path").getName());
   }
   @Test
   public void testParseURINoFilePrefix() throws SemanticException {
     conf.set(ConfVars.METASTOREWAREHOUSE.varname, "file:///path/to/warehouse");
     Assert.assertEquals("file:///some/path",
-        HiveAuthzBindingHook.parseURI("/some/path").getName());
+        HiveAuthzBindingHookBase.parseURI("/some/path").getName());
   }
   @Test
   public void testParseURINoHDFSPrefix() throws SemanticException {
     conf.set(ConfVars.METASTOREWAREHOUSE.varname, "hdfs://namenode:8080/path/to/warehouse");
     Assert.assertEquals("hdfs://namenode:8080/some/path",
-        HiveAuthzBindingHook.parseURI("/some/path").getName());
+        HiveAuthzBindingHookBase.parseURI("/some/path").getName());
   }
   @Test
   public void testParseURICorrectHDFSPrefix() throws SemanticException {
     Assert.assertEquals("hdfs:///some/path",
-        HiveAuthzBindingHook.parseURI("hdfs:///some/path").getName());
+        HiveAuthzBindingHookBase.parseURI("hdfs:///some/path").getName());
   }
 
   @Test
@@ -78,7 +78,7 @@ public class TestURI {
     conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, "hdfs://localhost:8020");
     conf.set(ConfVars.METASTOREWAREHOUSE.varname, "/path/to/warehouse");
     Assert.assertEquals("hdfs://localhost:8020/some/path",
-        HiveAuthzBindingHook.parseURI("/some/path").getName());
+        HiveAuthzBindingHookBase.parseURI("/some/path").getName());
   }
 
   @AfterClass

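The renamed tests pin down how parseURI qualifies bare paths: a path that already carries a scheme is kept as-is, and a scheme-less path inherits the scheme and authority of the default filesystem or warehouse. A rough sketch of that qualification using Hadoop's Path API (assuming hadoop-common on the classpath; this is not the actual parseURI implementation):

    import org.apache.hadoop.fs.Path;

    public class QualifySketch {
      // Qualify a bare path against a default filesystem, the behavior
      // the tests above expect for inputs like "/some/path".
      static String qualify(String rawPath, String defaultFs) {
        Path p = new Path(rawPath);
        if (p.toUri().getScheme() != null) {
          return p.toString(); // already qualified, e.g. hdfs:///some/path
        }
        // assumes rawPath is absolute, as in the tests above
        return defaultFs + p.toUri().getPath();
      }

      public static void main(String[] args) {
        System.out.println(qualify("/some/path", "hdfs://localhost:8020"));
        // prints hdfs://localhost:8020/some/path
      }
    }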
http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-hdfs/sentry-hdfs-service/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-service/pom.xml b/sentry-hdfs/sentry-hdfs-service/pom.xml
index 8553685..78f9da7 100644
--- a/sentry-hdfs/sentry-hdfs-service/pom.xml
+++ b/sentry-hdfs/sentry-hdfs-service/pom.xml
@@ -30,7 +30,7 @@ limitations under the License.
   <dependencies>
     <dependency>
       <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-binding-hive</artifactId>
+      <artifactId>sentry-binding-hive-common</artifactId>
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
@@ -109,5 +109,4 @@ limitations under the License.
     </dependency>
   </dependencies>
 
-
 </project>

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/MetastorePluginWithHA.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/MetastorePluginWithHA.java b/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/MetastorePluginWithHA.java
index 4f6d7ca..6476a01 100644
--- a/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/MetastorePluginWithHA.java
+++ b/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/MetastorePluginWithHA.java
@@ -23,7 +23,7 @@ import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.sentry.hdfs.ServiceConstants.ServerConfig;
 import org.apache.sentry.provider.db.SentryPolicyStorePlugin.SentryPluginException;
-import org.apache.sentry.binding.metastore.MetastoreAuthzBinding;
+import org.apache.sentry.binding.metastore.MetastoreAuthzBindingBase;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -51,11 +51,11 @@ public class MetastorePluginWithHA extends MetastorePlugin {
       case CHILD_REMOVED:
         break;
       case CONNECTION_RECONNECTED:
-        MetastoreAuthzBinding.setSentryCacheOutOfSync(false);
+        MetastoreAuthzBindingBase.setSentryCacheOutOfSync(false);
         break;
       case CONNECTION_SUSPENDED:
       case CONNECTION_LOST:
-        MetastoreAuthzBinding.setSentryCacheOutOfSync(true);
+        MetastoreAuthzBindingBase.setSentryCacheOutOfSync(true);
         break;
       default:
         break;

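The HA change above is a pure rename: the ZooKeeper connection-state listener keeps the same shape and simply flips the out-of-sync flag on the new base class. A trimmed sketch of that listener pattern (the flag holder here is a hypothetical stand-in for MetastoreAuthzBindingBase):

    import org.apache.curator.framework.CuratorFramework;
    import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent;
    import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener;

    public class CacheSyncListener implements PathChildrenCacheListener {
      // Hypothetical stand-in for MetastoreAuthzBindingBase.setSentryCacheOutOfSync
      static volatile boolean sentryCacheOutOfSync = false;

      @Override
      public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) {
        switch (event.getType()) {
          case CONNECTION_RECONNECTED:
            sentryCacheOutOfSync = false; // connected again, cache is trustworthy
            break;
          case CONNECTION_SUSPENDED:
          case CONNECTION_LOST:
            sentryCacheOutOfSync = true;  // updates may have been missed
            break;
          default:
            break;
        }
      }
    }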

[5/5] sentry git commit: SENTRY-1138: Extract common classes for binding-hive-v1 and binding-hive-v2 (Dapeng Sun, reviewed by Colin Ma)

Posted by sd...@apache.org.
SENTRY-1138: Extract common classes for binding-hive-v1 and binding-hive-v2 (Dapeng Sun, reviewed by Colin Ma)


Project: http://git-wip-us.apache.org/repos/asf/sentry/repo
Commit: http://git-wip-us.apache.org/repos/asf/sentry/commit/7a30c819
Tree: http://git-wip-us.apache.org/repos/asf/sentry/tree/7a30c819
Diff: http://git-wip-us.apache.org/repos/asf/sentry/diff/7a30c819

Branch: refs/heads/master
Commit: 7a30c819cf66fcff833db2da7993899b03a9d664
Parents: 4643f98
Author: Sun Dapeng <sd...@apache.org>
Authored: Wed Mar 23 10:19:02 2016 +0800
Committer: Sun Dapeng <sd...@apache.org>
Committed: Wed Mar 23 10:19:02 2016 +0800

----------------------------------------------------------------------
 pom.xml                                         |   5 +
 sentry-binding/pom.xml                          |  11 +-
 .../sentry-binding-hive-common/pom.xml          | 102 +++
 .../apache/hadoop/hive/SentryHiveConstants.java |  31 +
 .../hive/ql/exec/SentryFilterDDLTask.java       | 137 +++
 .../ql/exec/SentryHivePrivilegeObjectDesc.java  |  51 ++
 .../binding/hive/HiveAuthzBindingHookBase.java  | 826 +++++++++++++++++++
 .../hive/SentryIniPolicyFileFormatter.java      | 161 ++++
 .../binding/hive/SentryOnFailureHook.java       |  38 +
 .../hive/SentryOnFailureHookContext.java        |  98 +++
 .../hive/SentryOnFailureHookContextImpl.java    | 125 +++
 .../hive/SentryPolicyFileFormatFactory.java     |  44 +
 .../binding/hive/SentryPolicyFileFormatter.java |  39 +
 .../binding/hive/authz/HiveAuthzBinding.java    | 407 +++++++++
 .../binding/hive/authz/HiveAuthzPrivileges.java | 153 ++++
 .../binding/hive/authz/SentryConfigTool.java    | 622 ++++++++++++++
 .../sentry/binding/hive/conf/HiveAuthzConf.java | 269 ++++++
 .../conf/InvalidConfigurationException.java     |  31 +
 .../metastore/AuthorizingObjectStoreBase.java   | 412 +++++++++
 .../metastore/MetastoreAuthzBindingBase.java    | 450 ++++++++++
 .../metastore/SentryHiveMetaStoreClient.java    | 161 ++++
 .../metastore/SentryMetaStoreFilterHook.java    | 201 +++++
 .../SentryMetastorePostEventListenerBase.java   | 404 +++++++++
 sentry-binding/sentry-binding-hive-v2/pom.xml   |  17 +-
 sentry-binding/sentry-binding-hive/pom.xml      |  40 +-
 .../apache/hadoop/hive/SentryHiveConstants.java |  31 -
 .../hive/ql/exec/SentryFilterDDLTask.java       | 137 ---
 .../hive/ql/exec/SentryGrantRevokeTask.java     |   4 +-
 .../ql/exec/SentryHivePrivilegeObjectDesc.java  |  51 --
 .../binding/hive/HiveAuthzBindingHook.java      | 716 +---------------
 .../hive/HiveAuthzBindingSessionHook.java       |   2 +-
 .../hive/SentryIniPolicyFileFormatter.java      | 161 ----
 .../binding/hive/SentryOnFailureHook.java       |  38 -
 .../hive/SentryOnFailureHookContext.java        |  98 ---
 .../hive/SentryOnFailureHookContextImpl.java    | 125 ---
 .../hive/SentryPolicyFileFormatFactory.java     |  44 -
 .../binding/hive/SentryPolicyFileFormatter.java |  39 -
 .../binding/hive/authz/HiveAuthzBinding.java    | 407 ---------
 .../binding/hive/authz/HiveAuthzPrivileges.java | 153 ----
 .../binding/hive/authz/SentryConfigTool.java    | 622 --------------
 .../sentry/binding/hive/conf/HiveAuthzConf.java | 269 ------
 .../conf/InvalidConfigurationException.java     |  31 -
 .../metastore/AuthorizingObjectStore.java       |   6 +-
 .../metastore/MetastoreAuthzBinding.java        | 404 +--------
 .../metastore/SentryHiveMetaStoreClient.java    | 161 ----
 .../metastore/SentryMetaStoreFilterHook.java    | 201 -----
 .../org/apache/sentry/binding/hive/TestURI.java |  12 +-
 sentry-hdfs/sentry-hdfs-service/pom.xml         |   3 +-
 .../sentry/hdfs/MetastorePluginWithHA.java      |   6 +-
 49 files changed, 4804 insertions(+), 3752 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index d25c314..3393c47 100644
--- a/pom.xml
+++ b/pom.xml
@@ -391,6 +391,11 @@ limitations under the License.
       </dependency>
       <dependency>
         <groupId>org.apache.sentry</groupId>
+        <artifactId>sentry-binding-hive-common</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.sentry</groupId>
         <artifactId>sentry-binding-hive</artifactId>
         <version>${project.version}</version>
       </dependency>

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-binding/pom.xml b/sentry-binding/pom.xml
index 9e4999b..830f0b1 100644
--- a/sentry-binding/pom.xml
+++ b/sentry-binding/pom.xml
@@ -30,14 +30,23 @@ limitations under the License.
   <packaging>pom</packaging>
 
   <modules>
-    <module>sentry-binding-hive</module>
     <module>sentry-binding-kafka</module>
+    <module>sentry-binding-hive-common</module>
     <module>sentry-binding-solr</module>
     <module>sentry-binding-sqoop</module>
   </modules>
 
   <profiles>
     <profile>
+      <id>hive-authz1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <modules>
+        <module>sentry-binding-hive</module>
+      </modules>
+    </profile>
+    <profile>
       <id>hive-authz2</id>
       <activation>
         <activeByDefault>false</activeByDefault>

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/pom.xml b/sentry-binding/sentry-binding-hive-common/pom.xml
new file mode 100644
index 0000000..3748522
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/pom.xml
@@ -0,0 +1,102 @@
+<?xml version="1.0"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.apache.sentry</groupId>
+    <artifactId>sentry-binding</artifactId>
+    <version>1.7.0-incubating-SNAPSHOT</version>
+  </parent>
+
+  <artifactId>sentry-binding-hive-common</artifactId>
+  <name>Sentry Hive Binding Common</name>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.thrift</groupId>
+      <artifactId>libthrift</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.derby</groupId>
+      <artifactId>derby</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-service</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-core-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-core-model-db</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-provider-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-provider-file</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-provider-cache</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-policy-db</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <!-- required for SentryGrantRevokeTask -->
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-provider-db</artifactId>
+    </dependency>
+  </dependencies>
+
+</project>

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/SentryHiveConstants.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/SentryHiveConstants.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/SentryHiveConstants.java
new file mode 100644
index 0000000..5238414
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/SentryHiveConstants.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive;
+
+import java.util.EnumSet;
+
+import org.apache.hadoop.hive.ql.security.authorization.PrivilegeType;
+
+public class SentryHiveConstants {
+  public static final EnumSet<PrivilegeType> ALLOWED_PRIVS = EnumSet.allOf(PrivilegeType.class);
+
+  public static final String PRIVILEGE_NOT_SUPPORTED = "Sentry does not support privilege: ";
+  public static final String PARTITION_PRIVS_NOT_SUPPORTED = "Sentry does not support partition level authorization";
+  public static final String GRANT_REVOKE_NOT_SUPPORTED_ON_OBJECT = "Sentry does not allow grant/revoke on: ";
+  public static final String GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL = "Sentry does not allow privileges to be granted/revoked to/from: ";
+}

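Note that ALLOWED_PRIVS currently whitelists every PrivilegeType, so a guard built on it never fires today; presumably it is a hook for future restriction, with PRIVILEGE_NOT_SUPPORTED as the canned message. A hedged sketch of such a guard (the helper is hypothetical, not in this commit):

    import org.apache.hadoop.hive.SentryHiveConstants;
    import org.apache.hadoop.hive.ql.security.authorization.PrivilegeType;

    public class PrivilegeGuardSketch {
      // Hypothetical helper: reject privilege types outside the whitelist.
      static void checkSupported(PrivilegeType type) {
        if (!SentryHiveConstants.ALLOWED_PRIVS.contains(type)) {
          throw new UnsupportedOperationException(
              SentryHiveConstants.PRIVILEGE_NOT_SUPPORTED + type);
        }
      }
    }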
http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java
new file mode 100644
index 0000000..ca24531
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java
@@ -0,0 +1,137 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.exec;
+
+import static org.apache.hadoop.util.StringUtils.stringifyException;
+
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.DriverContext;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.sentry.binding.hive.HiveAuthzBindingHookBase;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
+import org.apache.sentry.core.common.Subject;
+
+import com.google.common.base.Preconditions;
+
+public class SentryFilterDDLTask extends DDLTask {
+  private static final long serialVersionUID = 1L;
+  private static final Log LOG = LogFactory.getLog(SentryFilterDDLTask.class);
+
+  private HiveAuthzBinding hiveAuthzBinding;
+  private Subject subject;
+  private HiveOperation stmtOperation;
+
+  public SentryFilterDDLTask(HiveAuthzBinding hiveAuthzBinding, Subject subject,
+      HiveOperation stmtOperation) {
+    Preconditions.checkNotNull(hiveAuthzBinding);
+    Preconditions.checkNotNull(subject);
+    Preconditions.checkNotNull(stmtOperation);
+
+    this.hiveAuthzBinding = hiveAuthzBinding;
+    this.subject = subject;
+    this.stmtOperation = stmtOperation;
+  }
+
+  public HiveAuthzBinding getHiveAuthzBinding() {
+    return hiveAuthzBinding;
+  }
+
+  public Subject getSubject() {
+    return subject;
+  }
+
+  public HiveOperation getStmtOperation() {
+    return stmtOperation;
+  }
+
+  @Override
+  public int execute(DriverContext driverContext) {
+    // Currently the SentryFilterDDLTask only supports filtering the "show columns in table" command.
+    ShowColumnsDesc showCols = work.getShowColumnsDesc();
+    try {
+      if (showCols != null) {
+        return showFilterColumns(showCols);
+      }
+    } catch (Throwable e) {
+      failed(e);
+      return 1;
+    }
+
+    return super.execute(driverContext);
+  }
+
+  private void failed(Throwable e) {
+    while (e.getCause() != null && e.getClass() == RuntimeException.class) {
+      e = e.getCause();
+    }
+    setException(e);
+    LOG.error(stringifyException(e));
+  }
+
+  /**
+   * Filter the command "show columns in table"
+   *
+   */
+  private int showFilterColumns(ShowColumnsDesc showCols) throws HiveException {
+    Table table = Hive.get(conf).getTable(showCols.getTableName());
+
+    // write the results in the file
+    DataOutputStream outStream = null;
+    try {
+      Path resFile = new Path(showCols.getResFile());
+      FileSystem fs = resFile.getFileSystem(conf);
+      outStream = fs.create(resFile);
+
+      List<FieldSchema> cols = table.getCols();
+      cols.addAll(table.getPartCols());
+      // In case the query is served by HiveServer2, don't pad it with spaces,
+      // as HiveServer2 output is consumed by JDBC/ODBC clients.
+      boolean isOutputPadded = !SessionState.get().isHiveServerQuery();
+      outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(
+          filterColumns(cols, table), false, isOutputPadded, null));
+      outStream.close();
+      outStream = null;
+    } catch (IOException e) {
+      throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
+    } finally {
+      IOUtils.closeStream(outStream);
+    }
+    return 0;
+  }
+
+  private List<FieldSchema> filterColumns(List<FieldSchema> cols, Table table) throws HiveException {
+    // keep only the columns the subject has privileges to see
+    return HiveAuthzBindingHookBase.filterShowColumns(getHiveAuthzBinding(),
+        cols, getStmtOperation(), getSubject().getName(), table.getTableName(), table.getDbName());
+  }
+}

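The new task's control flow is intercept-then-delegate: it handles exactly one work shape (SHOW COLUMNS) and falls through to DDLTask for everything else. A stripped-down sketch of that pattern, with hypothetical stand-in types in place of DDLWork and DDLTask:

    public class InterceptSketch {
      static class Work { String showColumnsTable; } // stand-in for DDLWork

      static class BaseTask {
        int execute(Work w) { return 0; } // stand-in for DDLTask.execute
      }

      static class FilteringTask extends BaseTask {
        @Override
        int execute(Work w) {
          if (w.showColumnsTable != null) {
            // filter the column list before writing results, as
            // showFilterColumns() does above
            return 0;
          }
          return super.execute(w); // all other DDL is untouched
        }
      }
    }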
http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java
new file mode 100644
index 0000000..4fa4221
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec;
+
+import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
+
+public class SentryHivePrivilegeObjectDesc extends PrivilegeObjectDesc {
+  private boolean isUri;
+  private boolean isServer;
+
+  public SentryHivePrivilegeObjectDesc() {
+    // reset table type which is on by default
+    super.setTable(false);
+  }
+
+  public boolean getUri() {
+    return isUri;
+  }
+
+  public void setUri(boolean isUri) {
+    this.isUri = isUri;
+  }
+
+  public boolean getServer() {
+    return isServer;
+  }
+
+  public void setServer(boolean isServer) {
+    this.isServer = isServer;
+  }
+
+  public boolean isSentryPrivObjectDesc() {
+    return isServer || isUri;
+  }
+
+}

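Usage of the new desc class is minimal: callers mark the grant target as a URI or server scope, which plain PrivilegeObjectDesc cannot express. A short illustrative snippet (grant plumbing omitted):

    import org.apache.hadoop.hive.ql.exec.SentryHivePrivilegeObjectDesc;

    public class PrivilegeObjectDescExample {
      public static void main(String[] args) {
        SentryHivePrivilegeObjectDesc desc = new SentryHivePrivilegeObjectDesc();
        desc.setUri(true); // a URI-scoped grant target
        System.out.println(desc.isSentryPrivObjectDesc()); // true
        System.out.println(desc.getServer());              // false
      }
    }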
http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHookBase.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHookBase.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHookBase.java
new file mode 100644
index 0000000..6df939f
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHookBase.java
@@ -0,0 +1,826 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive;
+
+import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
+
+import java.io.Serializable;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URL;
+import java.security.CodeSource;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.JavaUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.hooks.Entity;
+import org.apache.hadoop.hive.ql.hooks.Entity.Type;
+import org.apache.hadoop.hive.ql.hooks.Hook;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
+import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges;
+import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationScope;
+import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationType;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.core.common.Subject;
+import org.apache.sentry.core.common.utils.PathUtils;
+import org.apache.sentry.core.model.db.AccessURI;
+import org.apache.sentry.core.model.db.Column;
+import org.apache.sentry.core.model.db.DBModelAction;
+import org.apache.sentry.core.model.db.DBModelAuthorizable;
+import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType;
+import org.apache.sentry.core.model.db.Database;
+import org.apache.sentry.core.model.db.Table;
+import org.apache.sentry.provider.cache.PrivilegeCache;
+import org.apache.sentry.provider.cache.SimplePrivilegeCache;
+import org.apache.sentry.provider.common.AuthorizationProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Splitter;
+import com.google.common.collect.ImmutableList;
+
+public abstract class HiveAuthzBindingHookBase extends AbstractSemanticAnalyzerHook {
+  private static final Logger LOG = LoggerFactory
+      .getLogger(HiveAuthzBindingHookBase.class);
+  protected final HiveAuthzBinding hiveAuthzBinding;
+  protected final HiveAuthzConf authzConf;
+  protected Database currDB = Database.ALL;
+  protected Table currTab;
+  protected AccessURI udfURI;
+  protected AccessURI serdeURI;
+  protected AccessURI partitionURI;
+  protected Table currOutTab = null;
+  protected Database currOutDB = null;
+  protected final List<String> serdeWhiteList;
+  protected boolean serdeURIPrivilegesEnabled;
+
+  protected final static HiveAuthzPrivileges columnMetaDataPrivilege =
+      new HiveAuthzPrivileges.AuthzPrivilegeBuilder()
+          .addInputObjectPriviledge(AuthorizableType.Column,
+              EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT))
+          .setOperationScope(HiveOperationScope.COLUMN).setOperationType(HiveOperationType.INFO)
+          .build();
+
+  // True if this is a basic DESCRIBE <table> operation. False for other DESCRIBE variants
+  // like DESCRIBE [FORMATTED|EXTENDED]. Required because Hive treats these stmts as the same
+  // HiveOperationType, but we want to enforce different privileges on each statement.
+  // Basic DESCRIBE <table> is allowed with only column-level privs, while the variants
+  // require table-level privileges.
+  protected boolean isDescTableBasic = false;
+
+  public HiveAuthzBindingHookBase() throws Exception {
+    SessionState session = SessionState.get();
+    if(session == null) {
+      throw new IllegalStateException("Session has not been started");
+    }
+    // HACK: set a random classname to force the Auth V2 in Hive
+    SessionState.get().setAuthorizer(null);
+
+    HiveConf hiveConf = session.getConf();
+    if(hiveConf == null) {
+      throw new IllegalStateException("Session HiveConf is null");
+    }
+    authzConf = loadAuthzConf(hiveConf);
+    hiveAuthzBinding = new HiveAuthzBinding(hiveConf, authzConf);
+    String serdeWhiteLists =
+        authzConf.get(HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST,
+            HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST_DEFAULT);
+    serdeWhiteList = Arrays.asList(serdeWhiteLists.split(","));
+    serdeURIPrivilegesEnabled =
+        authzConf.getBoolean(HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED,
+            HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED_DEFAULT);
+
+    FunctionRegistry.setupPermissionsForBuiltinUDFs("", HiveAuthzConf.HIVE_UDF_BLACK_LIST);
+  }
+
+  public static HiveAuthzConf loadAuthzConf(HiveConf hiveConf) {
+    boolean deprecatedConfigFile = false;
+    HiveAuthzConf newAuthzConf = null;
+    String hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
+    if(hiveAuthzConf == null || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
+      hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_ACCESS_CONF_URL);
+      deprecatedConfigFile = true;
+    }
+
+    if(hiveAuthzConf == null || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
+      throw new IllegalArgumentException("Configuration key " + HiveAuthzConf.HIVE_SENTRY_CONF_URL
+          + " value '" + hiveAuthzConf + "' is invalid.");
+    }
+    try {
+      newAuthzConf = new HiveAuthzConf(new URL(hiveAuthzConf));
+    } catch (MalformedURLException e) {
+      if (deprecatedConfigFile) {
+        throw new IllegalArgumentException("Configuration key " + HiveAuthzConf.HIVE_ACCESS_CONF_URL
+            + " specifies a malformed URL '" + hiveAuthzConf + "'", e);
+      } else {
+        throw new IllegalArgumentException("Configuration key " + HiveAuthzConf.HIVE_SENTRY_CONF_URL
+            + " specifies a malformed URL '" + hiveAuthzConf + "'", e);
+      }
+    }
+    return newAuthzConf;
+  }
+
+  @Override
+  public abstract ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
+      throws SemanticException;
+
+  /**
+   * Post analyze hook that invokes hive auth bindings
+   */
+  @Override
+  public abstract void postAnalyze(HiveSemanticAnalyzerHookContext context,
+      List<Task<? extends Serializable>> rootTasks) throws SemanticException;
+
+  protected void executeOnFailureHooks(HiveSemanticAnalyzerHookContext context,
+      HiveOperation hiveOp, AuthorizationException e) {
+    SentryOnFailureHookContext hookCtx = new SentryOnFailureHookContextImpl(
+        context.getCommand(), context.getInputs(), context.getOutputs(),
+        hiveOp, currDB, currTab, udfURI, null, context.getUserName(),
+        context.getIpAddress(), e, context.getConf());
+    String csHooks = authzConf.get(
+        HiveAuthzConf.AuthzConfVars.AUTHZ_ONFAILURE_HOOKS.getVar(), "").trim();
+
+    try {
+      for (Hook aofh : getHooks(csHooks)) {
+        ((SentryOnFailureHook)aofh).run(hookCtx);
+      }
+    } catch (Exception ex) {
+      LOG.error("Error executing hook:", ex);
+    }
+  }
+
+  @VisibleForTesting
+  protected static AccessURI extractPartition(ASTNode ast) throws SemanticException {
+    for (int i = 0; i < ast.getChildCount(); i++) {
+      ASTNode child = (ASTNode)ast.getChild(i);
+      if (child.getToken().getType() == HiveParser.TOK_PARTITIONLOCATION &&
+          child.getChildCount() == 1) {
+        return parseURI(BaseSemanticAnalyzer.
+          unescapeSQLString(child.getChild(0).getText()));
+      }
+    }
+    return null;
+  }
+
+  @VisibleForTesting
+  protected static AccessURI parseURI(String uri) throws SemanticException {
+    return parseURI(uri, false);
+  }
+
+  @VisibleForTesting
+  protected static AccessURI parseURI(String uri, boolean isLocal)
+      throws SemanticException {
+    try {
+      HiveConf conf = SessionState.get().getConf();
+      String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE);
+      Path warehousePath = new Path(warehouseDir);
+      if (warehousePath.isAbsoluteAndSchemeAuthorityNull()) {
+        FileSystem fs = FileSystem.get(conf);
+        warehouseDir = fs.makeQualified(warehousePath).toUri().toString();
+      }
+      return new AccessURI(PathUtils.parseURI(warehouseDir, uri, isLocal));
+    } catch (Exception e) {
+      throw new SemanticException("Error parsing URI " + uri + ": " +
+        e.getMessage(), e);
+    }
+  }
+
+  // Find the current database for session
+  protected Database getCanonicalDb() {
+    return new Database(SessionState.get().getCurrentDatabase());
+  }
+
+  protected Database extractDatabase(ASTNode ast) throws SemanticException {
+    String tableName = BaseSemanticAnalyzer.getUnescapedName(ast);
+    if (tableName.contains(".")) {
+      return new Database(tableName.split("\\.")[0]);
+    } else {
+      return getCanonicalDb();
+    }
+  }
+
+  protected Table extractTable(ASTNode ast) throws SemanticException {
+    String tableName = BaseSemanticAnalyzer.getUnescapedName(ast);
+    if (tableName.contains(".")) {
+      return new Table(tableName.split("\\.")[1]);
+    } else {
+      return new Table(tableName);
+    }
+  }
+
+  public static void runFailureHook(SentryOnFailureHookContext hookContext,
+      String csHooks) {
+    try {
+      for (Hook aofh : getHooks(csHooks)) {
+        ((SentryOnFailureHook) aofh).run(hookContext);
+      }
+    } catch (Exception ex) {
+      LOG.error("Error executing hook:", ex);
+    }
+  }
+  /**
+   * Convert the input/output entities into authorizables. Generate
+   * authorizables for cases like Database and metadata operations where the
+   * compiler doesn't capture entities. Invoke the Hive binding to validate
+   * permissions.
+   *
+   * @param context
+   * @param stmtAuthObject
+   * @param stmtOperation
+   * @throws AuthorizationException
+   */
+  protected void authorizeWithHiveBindings(HiveSemanticAnalyzerHookContext context,
+      HiveAuthzPrivileges stmtAuthObject, HiveOperation stmtOperation) throws  AuthorizationException {
+    Set<ReadEntity> inputs = context.getInputs();
+    Set<WriteEntity> outputs = context.getOutputs();
+    List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+    List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("stmtAuthObject.getOperationScope() = " + stmtAuthObject.getOperationScope());
+      LOG.debug("context.getInputs() = " + context.getInputs());
+      LOG.debug("context.getOutputs() = " + context.getOutputs());
+    }
+
+    // Workaround to allow DESCRIBE <table> to be executed with only column-level privileges, while
+    // still authorizing DESCRIBE [EXTENDED|FORMATTED] as table-level.
+    // This is done by treating DESCRIBE <table> the same as SHOW COLUMNS, which only requires column
+    // level privs.
+    if (isDescTableBasic) {
+      stmtAuthObject = columnMetaDataPrivilege;
+    }
+
+    switch (stmtAuthObject.getOperationScope()) {
+
+    case SERVER :
+      // validate server-level privileges if applicable, e.g. create UDF, register jar, etc.
+      List<DBModelAuthorizable> serverHierarchy = new ArrayList<DBModelAuthorizable>();
+      serverHierarchy.add(hiveAuthzBinding.getAuthServer());
+      inputHierarchy.add(serverHierarchy);
+      break;
+    case DATABASE:
+      // workaround for database scope statements (create/alter/drop db)
+      List<DBModelAuthorizable> dbHierarchy = new ArrayList<DBModelAuthorizable>();
+      dbHierarchy.add(hiveAuthzBinding.getAuthServer());
+      dbHierarchy.add(currDB);
+      inputHierarchy.add(dbHierarchy);
+      outputHierarchy.add(dbHierarchy);
+
+      getInputHierarchyFromInputs(inputHierarchy, inputs);
+
+      if (serdeURI != null) {
+        List<DBModelAuthorizable> serdeUriHierarchy = new ArrayList<DBModelAuthorizable>();
+        serdeUriHierarchy.add(hiveAuthzBinding.getAuthServer());
+        serdeUriHierarchy.add(serdeURI);
+        outputHierarchy.add(serdeUriHierarchy);
+      }
+      break;
+    case TABLE:
+      // workaround for add partitions
+      if(partitionURI != null) {
+        inputHierarchy.add(ImmutableList.of(hiveAuthzBinding.getAuthServer(), partitionURI));
+      }
+
+      getInputHierarchyFromInputs(inputHierarchy, inputs);
+      for (WriteEntity writeEntity: outputs) {
+        if (filterWriteEntity(writeEntity)) {
+          continue;
+        }
+        List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
+        entityHierarchy.add(hiveAuthzBinding.getAuthServer());
+        entityHierarchy.addAll(getAuthzHierarchyFromEntity(writeEntity));
+        outputHierarchy.add(entityHierarchy);
+      }
+      // workaround for metadata queries.
+      // Capture the table name in pre-analyze and include that in the input entity list
+      if (currTab != null) {
+        List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
+        externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
+        externalAuthorizableHierarchy.add(currDB);
+        externalAuthorizableHierarchy.add(currTab);
+        inputHierarchy.add(externalAuthorizableHierarchy);
+      }
+
+      // workaround for DDL statements
+      // Capture the table name in pre-analyze and include that in the output entity list
+      if (currOutTab != null) {
+        List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
+        externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
+        externalAuthorizableHierarchy.add(currOutDB);
+        externalAuthorizableHierarchy.add(currOutTab);
+        outputHierarchy.add(externalAuthorizableHierarchy);
+      }
+
+      if (serdeURI != null) {
+        List<DBModelAuthorizable> serdeUriHierarchy = new ArrayList<DBModelAuthorizable>();
+        serdeUriHierarchy.add(hiveAuthzBinding.getAuthServer());
+        serdeUriHierarchy.add(serdeURI);
+        outputHierarchy.add(serdeUriHierarchy);
+      }
+
+      break;
+    case FUNCTION:
+      /* The 'FUNCTION' privilege scope is currently used for
+       *  - CREATE TEMP FUNCTION
+       *  - DROP TEMP FUNCTION.
+       */
+      if (udfURI != null) {
+        List<DBModelAuthorizable> udfUriHierarchy = new ArrayList<DBModelAuthorizable>();
+        udfUriHierarchy.add(hiveAuthzBinding.getAuthServer());
+        udfUriHierarchy.add(udfURI);
+        inputHierarchy.add(udfUriHierarchy);
+        for (WriteEntity writeEntity : outputs) {
+          List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
+          entityHierarchy.add(hiveAuthzBinding.getAuthServer());
+          entityHierarchy.addAll(getAuthzHierarchyFromEntity(writeEntity));
+          outputHierarchy.add(entityHierarchy);
+        }
+      }
+      break;
+    case CONNECT:
+      /* 'CONNECT' is an implicit privilege scope currently used for
+       *  - USE <db>
+       *  It's allowed when the user has any privilege on the current database. For application
+       *  backward compatibility, we allow (optional) implicit connect permission on 'default' db.
+       */
+      List<DBModelAuthorizable> connectHierarchy = new ArrayList<DBModelAuthorizable>();
+      connectHierarchy.add(hiveAuthzBinding.getAuthServer());
+      // by default allow connect access to default db
+      Table currTbl = Table.ALL;
+      Column currCol = Column.ALL;
+      if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(currDB.getName()) &&
+          "false".equalsIgnoreCase(authzConf.
+              get(HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), "false"))) {
+        currDB = Database.ALL;
+        currTbl = Table.SOME;
+      }
+
+      connectHierarchy.add(currDB);
+      connectHierarchy.add(currTbl);
+      connectHierarchy.add(currCol);
+
+      inputHierarchy.add(connectHierarchy);
+      outputHierarchy.add(connectHierarchy);
+      break;
+    case COLUMN:
+      for (ReadEntity readEntity: inputs) {
+        if (readEntity.getAccessedColumns() != null && !readEntity.getAccessedColumns().isEmpty()) {
+          addColumnHierarchy(inputHierarchy, readEntity);
+        } else {
+          List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
+          entityHierarchy.add(hiveAuthzBinding.getAuthServer());
+          entityHierarchy.addAll(getAuthzHierarchyFromEntity(readEntity));
+          entityHierarchy.add(Column.ALL);
+          inputHierarchy.add(entityHierarchy);
+        }
+      }
+      break;
+    default:
+      throw new AuthorizationException("Unknown operation scope type " +
+          stmtAuthObject.getOperationScope().toString());
+    }
+
+    HiveAuthzBinding binding = null;
+    try {
+      binding = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, context.getUserName());
+    } catch (SemanticException e) {
+      // Will use the original hiveAuthzBinding
+      binding = hiveAuthzBinding;
+    }
+    // validate permission
+    binding.authorize(stmtOperation, stmtAuthObject, getCurrentSubject(context), inputHierarchy,
+        outputHierarchy);
+  }
+
+  // Build the hierarchy of authorizable object for the given entity type.
+  private List<DBModelAuthorizable> getAuthzHierarchyFromEntity(Entity entity) {
+    List<DBModelAuthorizable> objectHierarchy = new ArrayList<DBModelAuthorizable>();
+    switch (entity.getType()) {
+    case TABLE:
+      objectHierarchy.add(new Database(entity.getTable().getDbName()));
+      objectHierarchy.add(new Table(entity.getTable().getTableName()));
+      break;
+    case PARTITION:
+    case DUMMYPARTITION:
+      objectHierarchy.add(new Database(entity.getPartition().getTable().getDbName()));
+      objectHierarchy.add(new Table(entity.getPartition().getTable().getTableName()));
+      break;
+    case DFS_DIR:
+    case LOCAL_DIR:
+      try {
+        objectHierarchy.add(parseURI(entity.toString(),
+            entity.getType().equals(Entity.Type.LOCAL_DIR)));
+      } catch (Exception e) {
+        throw new AuthorizationException("Failed to get File URI", e);
+      }
+      break;
+    case DATABASE:
+    case FUNCTION:
+      // TODO use database entities from compiler instead of capturing from AST
+      break;
+    default:
+      throw new UnsupportedOperationException("Unsupported entity type " +
+          entity.getType().name());
+    }
+    return objectHierarchy;
+  }
+
+  /**
+   * Add column level hierarchy to inputHierarchy
+   *
+   * @param inputHierarchy
+   * @param entity
+   */
+  protected void addColumnHierarchy(List<List<DBModelAuthorizable>> inputHierarchy,
+      ReadEntity entity) {
+    List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
+    entityHierarchy.add(hiveAuthzBinding.getAuthServer());
+    entityHierarchy.addAll(getAuthzHierarchyFromEntity(entity));
+
+    switch (entity.getType()) {
+    case TABLE:
+    case PARTITION:
+      List<String> cols = entity.getAccessedColumns();
+      for (String col : cols) {
+        List<DBModelAuthorizable> colHierarchy = new ArrayList<DBModelAuthorizable>(entityHierarchy);
+        colHierarchy.add(new Column(col));
+        inputHierarchy.add(colHierarchy);
+      }
+      break;
+    default:
+      inputHierarchy.add(entityHierarchy);
+    }
+  }
+
+  /**
+   * Get Authorizable from inputs and put into inputHierarchy
+   *
+   * @param inputHierarchy
+   * @param inputs
+   */
+  protected void getInputHierarchyFromInputs(List<List<DBModelAuthorizable>> inputHierarchy,
+      Set<ReadEntity> inputs) {
+    for (ReadEntity readEntity: inputs) {
+      // skip the tables/view that are part of expanded view definition
+      // skip the Hive generated dummy entities created for queries like 'select <expr>'
+      if (isChildTabForView(readEntity) || isDummyEntity(readEntity)) {
+        continue;
+      }
+      if (readEntity.getAccessedColumns() != null && !readEntity.getAccessedColumns().isEmpty()) {
+        addColumnHierarchy(inputHierarchy, readEntity);
+      } else {
+        List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
+        entityHierarchy.add(hiveAuthzBinding.getAuthServer());
+        entityHierarchy.addAll(getAuthzHierarchyFromEntity(readEntity));
+        inputHierarchy.add(entityHierarchy);
+      }
+    }
+  }
+
+  // Check if this write entity needs to be skipped
+  private boolean filterWriteEntity(WriteEntity writeEntity)
+      throws AuthorizationException {
+    // skip URI validation for session scratch file URIs
+    if (writeEntity.isTempURI()) {
+      return true;
+    }
+    try {
+      if (writeEntity.getTyp().equals(Type.DFS_DIR)
+          || writeEntity.getTyp().equals(Type.LOCAL_DIR)) {
+        HiveConf conf = SessionState.get().getConf();
+        String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE);
+        URI scratchURI = new URI(PathUtils.parseDFSURI(warehouseDir,
+          conf.getVar(HiveConf.ConfVars.SCRATCHDIR)));
+        URI requestURI = new URI(PathUtils.parseDFSURI(warehouseDir,
+          writeEntity.getLocation().getPath()));
+        LOG.debug("scratchURI = " + scratchURI + ", requestURI = " + requestURI);
+        if (PathUtils.impliesURI(scratchURI, requestURI)) {
+          return true;
+        }
+        URI localScratchURI = new URI(PathUtils.parseLocalURI(conf.getVar(HiveConf.ConfVars.LOCALSCRATCHDIR)));
+        URI localRequestURI = new URI(PathUtils.parseLocalURI(writeEntity.getLocation().getPath()));
+        LOG.debug("localScratchURI = " + localScratchURI + ", localRequestURI = " + localRequestURI);
+        if (PathUtils.impliesURI(localScratchURI, localRequestURI)) {
+          return true;
+        }
+      }
+    } catch (Exception e) {
+      throw new AuthorizationException("Failed to extract uri details", e);
+    }
+    return false;
+  }
+
+  public static List<String> filterShowTables(
+      HiveAuthzBinding hiveAuthzBinding, List<String> queryResult,
+      HiveOperation operation, String userName, String dbName)
+          throws SemanticException {
+    List<String> filteredResult = new ArrayList<String>();
+    Subject subject = new Subject(userName);
+    HiveAuthzPrivileges tableMetaDataPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
+        addInputObjectPriviledge(AuthorizableType.Column, EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)).
+        setOperationScope(HiveOperationScope.TABLE).
+        setOperationType(HiveOperationType.INFO).
+        build();
+
+    HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);
+
+    for (String tableName : queryResult) {
+      // if user has privileges on table, add to filtered list, else discard
+      Table table = new Table(tableName);
+      Database database = new Database(dbName);
+
+      List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+      List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+      List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
+      externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
+      externalAuthorizableHierarchy.add(database);
+      externalAuthorizableHierarchy.add(table);
+      externalAuthorizableHierarchy.add(Column.ALL);
+      inputHierarchy.add(externalAuthorizableHierarchy);
+
+      try {
+        // do the authorization by new HiveAuthzBinding with PrivilegeCache
+        hiveBindingWithPrivilegeCache.authorize(operation, tableMetaDataPrivilege, subject,
+            inputHierarchy, outputHierarchy);
+        filteredResult.add(table.getName());
+      } catch (AuthorizationException e) {
+        // squash the exception; the user doesn't have privileges, so the
+        // table is not added to the filtered list.
+      }
+    }
+    return filteredResult;
+  }
+
+  public static List<FieldSchema> filterShowColumns(
+      HiveAuthzBinding hiveAuthzBinding, List<FieldSchema> cols,
+      HiveOperation operation, String userName, String tableName, String dbName)
+          throws SemanticException {
+    List<FieldSchema> filteredResult = new ArrayList<FieldSchema>();
+    Subject subject = new Subject(userName);
+    HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);
+
+    Database database = new Database(dbName);
+    Table table = new Table(tableName);
+    for (FieldSchema col : cols) {
+      // if user has privileges on column, add to filtered list, else discard
+      List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+      List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+      List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
+      externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
+      externalAuthorizableHierarchy.add(database);
+      externalAuthorizableHierarchy.add(table);
+      externalAuthorizableHierarchy.add(new Column(col.getName()));
+      inputHierarchy.add(externalAuthorizableHierarchy);
+
+      try {
+        // do the authorization by new HiveAuthzBinding with PrivilegeCache
+        hiveBindingWithPrivilegeCache.authorize(operation, columnMetaDataPrivilege, subject,
+            inputHierarchy, outputHierarchy);
+        filteredResult.add(col);
+      } catch (AuthorizationException e) {
+        // squash the exception; the user doesn't have privileges, so the
+        // column is not added to the filtered list.
+      }
+    }
+    return filteredResult;
+  }
+
+  public static List<String> filterShowDatabases(
+      HiveAuthzBinding hiveAuthzBinding, List<String> queryResult,
+      HiveOperation operation, String userName) throws SemanticException {
+    List<String> filteredResult = new ArrayList<String>();
+    Subject subject = new Subject(userName);
+    HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);
+
+    HiveAuthzPrivileges anyPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
+        addInputObjectPriviledge(AuthorizableType.Column, EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)).
+        addInputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.SELECT)).
+        setOperationScope(HiveOperationScope.CONNECT).
+        setOperationType(HiveOperationType.QUERY).
+        build();
+
+    for (String dbName:queryResult) {
+      // if user has privileges on database, add to filtered list, else discard
+      Database database = null;
+
+      // if default is not restricted, continue
+      if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(dbName) && "false".equalsIgnoreCase(
+        hiveAuthzBinding.getAuthzConf().get(
+              HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(),
+              "false"))) {
+        filteredResult.add(DEFAULT_DATABASE_NAME);
+        continue;
+      }
+
+      database = new Database(dbName);
+
+      List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+      List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+      List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
+      externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
+      externalAuthorizableHierarchy.add(database);
+      externalAuthorizableHierarchy.add(Table.ALL);
+      externalAuthorizableHierarchy.add(Column.ALL);
+      inputHierarchy.add(externalAuthorizableHierarchy);
+
+      try {
+        // do the authorization by new HiveAuthzBinding with PrivilegeCache
+        hiveBindingWithPrivilegeCache.authorize(operation, anyPrivilege, subject,
+            inputHierarchy, outputHierarchy);
+        filteredResult.add(database.getName());
+      } catch (AuthorizationException e) {
+        // squash the exception; the user doesn't have privileges, so the
+        // database is not added to the filtered list.
+      }
+    }
+
+    return filteredResult;
+  }
+
+  /**
+   * Check if the given read entity is a table that has parents of type Table.
+   * The Hive compiler performs a query rewrite by replacing a view with its definition. In the
+   * process, it captures both the original view and the tables/views that it selects from.
+   * Access authorization is only interested in the top-level views, not the underlying tables.
+   * @param readEntity
+   * @return true if the entity is a table that was pulled in by a view expansion
+   */
+  private boolean isChildTabForView(ReadEntity readEntity) {
+    // If this is a table added for view, then we need to skip that
+    if (!readEntity.getType().equals(Type.TABLE) && !readEntity.getType().equals(Type.PARTITION)) {
+      return false;
+    }
+    if (readEntity.getParents() != null && readEntity.getParents().size() > 0) {
+      for (ReadEntity parentEntity : readEntity.getParents()) {
+        if (!parentEntity.getType().equals(Type.TABLE)) {
+          return false;
+        }
+      }
+      return true;
+    } else {
+      return false;
+    }
+  }
+
+  /**
+   * Returns the hooks specified in a configuration value. The hooks are returned in a list in
+   * the order they were specified.
+   *
+   * @param csHooks A comma-separated list of hook class names.
+   * @return        A list of hook instances, in the order their classes are listed in csHooks.
+   * @throws Exception
+   */
+  private static <T extends Hook> List<T> getHooks(String csHooks) throws Exception {
+
+    List<T> hooks = new ArrayList<T>();
+    if (csHooks.isEmpty()) {
+      return hooks;
+    }
+    for (String hookClass : Splitter.on(",").omitEmptyStrings().trimResults().split(csHooks)) {
+      try {
+        @SuppressWarnings("unchecked")
+        T hook =
+            (T) Class.forName(hookClass, true, JavaUtils.getClassLoader()).newInstance();
+        hooks.add(hook);
+      } catch (ClassNotFoundException e) {
+        LOG.error(hookClass + " class not found: " + e.getMessage());
+        throw e;
+      }
+    }
+
+    return hooks;
+  }
+
+  // Check if the given entity is identified as dummy by Hive compilers.
+  private boolean isDummyEntity(Entity entity) {
+    return entity.isDummy();
+  }
+
+  // create hiveBinding with PrivilegeCache
+  private static HiveAuthzBinding getHiveBindingWithPrivilegeCache(HiveAuthzBinding hiveAuthzBinding,
+      String userName) throws SemanticException {
+    // get the original HiveAuthzBinding, and get the user's privileges by AuthorizationProvider
+    AuthorizationProvider authProvider = hiveAuthzBinding.getCurrentAuthProvider();
+    Set<String> userPrivileges = authProvider.getPolicyEngine().getPrivileges(
+            authProvider.getGroupMapping().getGroups(userName), hiveAuthzBinding.getActiveRoleSet(),
+            hiveAuthzBinding.getAuthServer());
+
+    // create PrivilegeCache using user's privileges
+    PrivilegeCache privilegeCache = new SimplePrivilegeCache(userPrivileges);
+    try {
+      // create new instance of HiveAuthzBinding whose backend provider should be SimpleCacheProviderBackend
+      return new HiveAuthzBinding(HiveAuthzBinding.HiveHook.HiveServer2, hiveAuthzBinding.getHiveConf(),
+              hiveAuthzBinding.getAuthzConf(), privilegeCache);
+    } catch (Exception e) {
+      LOG.error("Can not create HiveAuthzBinding with privilege cache.");
+      throw new SemanticException(e);
+    }
+  }
+
+  private static boolean hasPrefixMatch(List<String> prefixList, final String str) {
+    for (String prefix : prefixList) {
+      if (str.startsWith(prefix)) {
+        return true;
+      }
+    }
+
+    return false;
+  }
+
+  /**
+   * Set the Serde URI privileges. If the URI privileges are not set, serdeURI will be null and
+   * the URI authorization checks will be skipped.
+   */
+  protected void setSerdeURI(String serdeClassName) throws SemanticException {
+    if (!serdeURIPrivilegesEnabled) {
+      return;
+    }
+
+    // A whitelisted Serde jar can be used by any user. Whitelist checking is
+    // done by comparing Java package names. The assumption is that the cluster
+    // admin will ensure there are no Java namespace collisions,
+    // e.g. org.apache.hadoop.hive.serde2 is used by Hive, so the admin should
+    // ensure no custom Serde class is introduced under the same namespace.
+    if (!hasPrefixMatch(serdeWhiteList, serdeClassName)) {
+      try {
+        CodeSource serdeSrc =
+            Class.forName(serdeClassName, true, Utilities.getSessionSpecifiedClassLoader())
+                .getProtectionDomain().getCodeSource();
+        if (serdeSrc == null) {
+          throw new SemanticException("Could not resolve the jar for Serde class " + serdeClassName);
+        }
+
+        String serdeJar = serdeSrc.getLocation().getPath();
+        if (serdeJar == null || serdeJar.isEmpty()) {
+          throw new SemanticException("Could not find the jar for Serde class " + serdeClassName
+              + "to validate privileges");
+        }
+
+        serdeURI = parseURI(serdeSrc.getLocation().toString(), true);
+      } catch (ClassNotFoundException e) {
+        throw new SemanticException("Error retrieving Serde class:" + e.getMessage(), e);
+      }
+    }
+  }
+
+  protected HiveOperation getCurrentHiveStmtOp() {
+    SessionState sessState = SessionState.get();
+    if (sessState == null) {
+      // TODO: Warn
+      return null;
+    }
+    return sessState.getHiveOperation();
+  }
+
+  protected Subject getCurrentSubject(HiveSemanticAnalyzerHookContext context) {
+    // Extract the username from the hook context
+    return new Subject(context.getUserName());
+  }
+
+}
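
For reference, a minimal sketch of invoking the static metadata-filter entry
points added above. The enclosing class name (HiveAuthzBindingHookBase) and the
table names are assumptions for illustration; only tables on which the user
holds column-scope SELECT or INSERT survive the filter:

    import java.util.Arrays;
    import java.util.List;
    import org.apache.hadoop.hive.ql.plan.HiveOperation;
    import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
    import org.apache.sentry.binding.hive.authz.HiveAuthzBindingHookBase;

    public class ShowTablesFilterSketch {
      // Returns only the tables in 'default' that 'user' may see.
      static List<String> visibleTables(HiveAuthzBinding binding, String user)
          throws Exception {
        List<String> allTables = Arrays.asList("t1", "t2", "restricted");
        // Unauthorized tables are silently dropped (the AuthorizationException
        // is squashed inside filterShowTables).
        return HiveAuthzBindingHookBase.filterShowTables(
            binding, allTables, HiveOperation.SHOWTABLES, user, "default");
      }
    }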

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java
new file mode 100644
index 0000000..630bef3
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java
@@ -0,0 +1,161 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.binding.hive;
+
+import java.io.File;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.sentry.policy.common.PolicyConstants;
+import org.apache.sentry.provider.common.PolicyFileConstants;
+import org.apache.sentry.provider.common.ProviderBackendContext;
+import org.apache.sentry.provider.file.SimpleFileProviderBackend;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Charsets;
+import com.google.common.base.Joiner;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+import com.google.common.collect.Table;
+import com.google.common.io.Files;
+
+/**
+ * SentryIniPolicyFileFormatter parses and writes Sentry mapping data files in
+ * ini format, e.g.:
+ * [groups]
+ * group1=role1
+ * [roles]
+ * role1=server=server1
+ */
+public class SentryIniPolicyFileFormatter implements SentryPolicyFileFormatter {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(SentryIniPolicyFileFormatter.class);
+
+  private static final String NL = System.getProperty("line.separator", "\n");
+
+  /**
+   * Write the sentry mapping data to ini file.
+   *
+   * @param resourcePath
+   *        The path of the output file
+   * @param sentryMappingData
+   *        The map for sentry mapping data, eg:
+   *        for the following mapping data:
+   *        group1=role1,role2
+   *        group2=role2,role3
+   *        role1=server=server1->db=db1
+   *        role2=server=server1->db=db1->table=tbl1,server=server1->db=db1->table=tbl2
+   *        role3=server=server1->url=hdfs://localhost/path
+   *
+   *        The sentryMappingData will be passed in as:
+   *        {
+   *        groups={group1=[role1, role2], group2=[role2, role3]},
+   *        roles={role1=[server=server1->db=db1],
+   *        role2=[server=server1->db=db1->table=tbl1,server=server1->db=db1->table=tbl2],
+   *        role3=[server=server1->url=hdfs://localhost/path]
+   *        }
+   *        }
+   */
+  @Override
+  public void write(String resourcePath, Map<String, Map<String, Set<String>>> sentryMappingData)
+      throws Exception {
+    File destFile = new File(resourcePath);
+    if (destFile.exists() && !destFile.delete()) {
+      throw new IllegalStateException("Unable to delete " + destFile);
+    }
+    String contents = Joiner.on(NL).join(
+        generateSection(PolicyFileConstants.GROUPS,
+            sentryMappingData.get(PolicyFileConstants.GROUPS)),
+        generateSection(PolicyFileConstants.ROLES,
+            sentryMappingData.get(PolicyFileConstants.ROLES)),
+        "");
+    LOGGER.info("Writing policy file to " + destFile + ":\n" + contents);
+    Files.write(contents, destFile, Charsets.UTF_8);
+  }
+
+  /**
+   * Parse the ini file and return a map with all mapping data.
+   *
+   * @param resourcePath
+   *        The path of the input file
+   * @param conf
+   *        The configuration info
+   * @return the Sentry mapping data as a map.
+   */
+  @Override
+  public Map<String, Map<String, Set<String>>> parse(String resourcePath, Configuration conf)
+      throws Exception {
+    Map<String, Map<String, Set<String>>> resultMap = Maps.newHashMap();
+    // SimpleFileProviderBackend is used to parse the ini file
+    SimpleFileProviderBackend policyFileBackend = new SimpleFileProviderBackend(conf, resourcePath);
+    ProviderBackendContext context = new ProviderBackendContext();
+    context.setAllowPerDatabase(true);
+    // parse the ini file
+    policyFileBackend.initialize(context);
+
+    // SimpleFileProviderBackend parses the input file and exposes the data in Table format.
+    Table<String, String, Set<String>> groupRolePrivilegeTable = policyFileBackend
+        .getGroupRolePrivilegeTable();
+    Map<String, Set<String>> groupRolesMap = Maps.newHashMap();
+    Map<String, Set<String>> rolePrivilegesMap = Maps.newHashMap();
+    for (String groupName : groupRolePrivilegeTable.rowKeySet()) {
+      for (String roleName : groupRolePrivilegeTable.columnKeySet()) {
+        // get the roles set for the current groupName
+        Set<String> tempRoles = groupRolesMap.get(groupName);
+        if (tempRoles == null) {
+          tempRoles = Sets.newHashSet();
+        }
+        Set<String> privileges = groupRolePrivilegeTable.get(groupName, roleName);
+        // record the [group, role] pair only if it has privileges; if no privilege exists,
+        // the [group, role] info is discarded.
+        if (privileges != null) {
+          // update [group, role] mapping data
+          tempRoles.add(roleName);
+          groupRolesMap.put(groupName, tempRoles);
+          // update [role, privilege] mapping data
+          rolePrivilegesMap.put(roleName, privileges);
+        }
+      }
+    }
+    resultMap.put(PolicyFileConstants.GROUPS, groupRolesMap);
+    resultMap.put(PolicyFileConstants.ROLES, rolePrivilegesMap);
+    return resultMap;
+  }
+
+  // generate the ini section according to the mapping data.
+  private String generateSection(String name, Map<String, Set<String>> mappingData) {
+    if (mappingData.isEmpty()) {
+      return "";
+    }
+    List<String> lines = Lists.newArrayList();
+    lines.add("[" + name + "]");
+    for (Map.Entry<String, Set<String>> entry : mappingData.entrySet()) {
+      lines.add(PolicyConstants.KV_JOINER.join(entry.getKey(),
+          PolicyConstants.ROLE_JOINER.join(entry.getValue())));
+    }
+    return Joiner.on(NL).join(lines);
+  }
+
+}
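
As a usage illustration (not part of this patch), the formatter above can be
round-tripped as follows. The section keys mirror PolicyFileConstants.GROUPS
and PolicyFileConstants.ROLES, and the /tmp path is a placeholder:

    import java.util.Map;
    import java.util.Set;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.sentry.binding.hive.SentryIniPolicyFileFormatter;
    import com.google.common.collect.Maps;
    import com.google.common.collect.Sets;

    public class IniFormatterSketch {
      public static void main(String[] args) throws Exception {
        Map<String, Set<String>> groups = Maps.newHashMap();
        groups.put("group1", Sets.newHashSet("role1"));
        Map<String, Set<String>> roles = Maps.newHashMap();
        roles.put("role1", Sets.newHashSet("server=server1->db=db1"));

        Map<String, Map<String, Set<String>>> data = Maps.newHashMap();
        data.put("groups", groups);   // PolicyFileConstants.GROUPS
        data.put("roles", roles);     // PolicyFileConstants.ROLES

        SentryIniPolicyFileFormatter formatter = new SentryIniPolicyFileFormatter();
        // Writes a file containing [groups] and [roles] sections.
        formatter.write("/tmp/sentry-policy.ini", data);
        // Parses it back into the same map structure.
        Map<String, Map<String, Set<String>>> parsed =
            formatter.parse("/tmp/sentry-policy.ini", new Configuration());
      }
    }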

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHook.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHook.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHook.java
new file mode 100644
index 0000000..45a2925
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHook.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.binding.hive;
+
+import org.apache.hadoop.hive.ql.hooks.Hook;
+
+/**
+ *
+ * SentryOnFailureHook allows Sentry to be extended
+ * with custom logic to be executed upon authorization failure.
+ *
+ */
+public interface SentryOnFailureHook extends Hook {
+
+  /**
+   *
+   * @param context
+   *     The hook context passed to each hook.
+   * @throws Exception
+   */
+  void run(SentryOnFailureHookContext context) throws Exception;
+}
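
A hedged sketch of a custom implementation (the class name and logging behavior
are illustrative); such a hook would be wired in through the comma-separated
hook class configuration parsed by getHooks() above:

    import org.apache.sentry.binding.hive.SentryOnFailureHook;
    import org.apache.sentry.binding.hive.SentryOnFailureHookContext;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggingOnFailureHook implements SentryOnFailureHook {
      private static final Logger LOG =
          LoggerFactory.getLogger(LoggingOnFailureHook.class);

      @Override
      public void run(SentryOnFailureHookContext context) throws Exception {
        // Record who attempted what; the context carries the failed command,
        // the operation, and the AuthorizationException that was raised.
        LOG.warn("Authorization failure: user=" + context.getUserName()
            + " operation=" + context.getHiveOp()
            + " command=" + context.getCommand(), context.getException());
      }
    }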

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContext.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContext.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContext.java
new file mode 100644
index 0000000..c101a4f
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContext.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.binding.hive;
+
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.sentry.core.model.db.AccessURI;
+import org.apache.sentry.core.model.db.Database;
+import org.apache.sentry.core.model.db.Table;
+
+/**
+ * Context information provided by Sentry to implementations
+ * of SentryOnFailureHook.
+ */
+public interface SentryOnFailureHookContext  {
+
+  /**
+   * @return the command attempted by user
+   */
+  String getCommand();
+
+  /**
+   * @return the set of read entities
+   */
+  Set<ReadEntity> getInputs();
+
+  /**
+   * @return the set of write entities
+   */
+  Set<WriteEntity> getOutputs();
+
+  /**
+   * @return the operation
+   */
+  HiveOperation getHiveOp();
+
+  /**
+   * @return the user name
+   */
+  String getUserName();
+
+  /**
+   * @return the ip address
+   */
+  String getIpAddress();
+
+  /**
+   * @return the database object
+   */
+  Database getDatabase();
+
+  /**
+   * @return the table object
+   */
+  Table getTable();
+
+  /**
+   * @return the udf URI
+   */
+  AccessURI getUdfURI();
+
+  /**
+   * @return the partition URI
+   */
+  AccessURI getPartitionURI();
+
+  /**
+   * @return the authorization failure exception
+   */
+  AuthorizationException getException();
+
+  /**
+   * @return the config
+   */
+  Configuration getConf();
+
+}

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContextImpl.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContextImpl.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContextImpl.java
new file mode 100644
index 0000000..f97d7f3
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContextImpl.java
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.binding.hive;
+
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.sentry.core.model.db.AccessURI;
+import org.apache.sentry.core.model.db.Database;
+import org.apache.sentry.core.model.db.Table;
+
+public class SentryOnFailureHookContextImpl implements SentryOnFailureHookContext {
+
+  private final String command;
+  private final Set<ReadEntity> inputs;
+  private final Set<WriteEntity> outputs;
+  private final HiveOperation hiveOp;
+  private final String userName;
+  private final String ipAddress;
+  private final Database database;
+  private final Table table;
+  private final AccessURI udfURI;
+  private final AccessURI partitionURI;
+  private final AuthorizationException authException;
+  private final Configuration conf;
+
+  public SentryOnFailureHookContextImpl(String command,
+      Set<ReadEntity> inputs, Set<WriteEntity> outputs, HiveOperation hiveOp,
+      Database db, Table tab, AccessURI udfURI, AccessURI partitionURI,
+      String userName, String ipAddress, AuthorizationException e,
+      Configuration conf) {
+    this.command = command;
+    this.inputs = inputs;
+    this.outputs = outputs;
+    this.hiveOp = hiveOp;
+    this.userName = userName;
+    this.ipAddress = ipAddress;
+    this.database = db;
+    this.table = tab;
+    this.udfURI = udfURI;
+    this.partitionURI = partitionURI;
+    this.authException = e;
+    this.conf = conf;
+  }
+
+  @Override
+  public String getCommand() {
+    return command;
+  }
+
+  @Override
+  public Set<ReadEntity> getInputs() {
+    return inputs;
+  }
+
+  @Override
+  public Set<WriteEntity> getOutputs() {
+    return outputs;
+  }
+
+  @Override
+  public HiveOperation getHiveOp() {
+    return hiveOp;
+  }
+
+  @Override
+  public String getUserName() {
+    return userName;
+  }
+
+  @Override
+  public String getIpAddress() {
+    return ipAddress;
+  }
+
+  @Override
+  public Database getDatabase() {
+    return database;
+  }
+
+  @Override
+  public Table getTable() {
+    return table;
+  }
+
+  @Override
+  public AccessURI getUdfURI() {
+    return udfURI;
+  }
+
+  @Override
+  public AccessURI getPartitionURI() {
+    return partitionURI;
+  }
+
+  @Override
+  public AuthorizationException getException() {
+    return authException;
+  }
+
+  @Override
+  public Configuration getConf() {
+    return conf;
+  }
+}
\ No newline at end of file
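
A sketch of how a binding might build this context and dispatch it to the
configured failure hooks; the helper name is hypothetical, and the null
database/table/URI arguments stand in for values that are not always available:

    import java.util.List;
    import java.util.Set;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.ql.hooks.ReadEntity;
    import org.apache.hadoop.hive.ql.hooks.WriteEntity;
    import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
    import org.apache.hadoop.hive.ql.plan.HiveOperation;
    import org.apache.sentry.binding.hive.SentryOnFailureHook;
    import org.apache.sentry.binding.hive.SentryOnFailureHookContext;
    import org.apache.sentry.binding.hive.SentryOnFailureHookContextImpl;

    public class FailureHookDispatchSketch {
      // Builds the immutable context and hands it to each configured hook.
      static void onAuthzFailure(String command, Set<ReadEntity> inputs,
          Set<WriteEntity> outputs, HiveOperation op, String user, String ip,
          AuthorizationException e, Configuration conf,
          List<SentryOnFailureHook> hooks) throws Exception {
        SentryOnFailureHookContext ctx = new SentryOnFailureHookContextImpl(
            command, inputs, outputs, op,
            null, null,   // database and table, when resolved
            null, null,   // udfURI and partitionURI, when applicable
            user, ip, e, conf);
        for (SentryOnFailureHook hook : hooks) {
          hook.run(ctx);
        }
      }
    }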

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatFactory.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatFactory.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatFactory.java
new file mode 100644
index 0000000..d2c6072
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatFactory.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.binding.hive;
+
+import java.lang.reflect.Constructor;
+
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
+
+/**
+ * SentryPolicyFileFormatFactory is used to create a SentryPolicyFileFormatter for the configured
+ * file type; the default formatter handles ini files.
+ */
+public class SentryPolicyFileFormatFactory {
+
+  public static SentryPolicyFileFormatter createFileFormatter(HiveAuthzConf conf) throws Exception {
+    // The default formatter is org.apache.sentry.binding.hive.SentryIniPolicyFileFormatter,
+    // for ini files.
+    String policyFileFormatterName = conf.get(AuthzConfVars.AUTHZ_POLICY_FILE_FORMATTER.getVar());
+    // load the policy file formatter class
+    Constructor<?> policyFileFormatterConstructor = Class.forName(policyFileFormatterName)
+        .getDeclaredConstructor();
+    policyFileFormatterConstructor.setAccessible(true);
+    SentryPolicyFileFormatter sentryPolicyFileFormatter = (SentryPolicyFileFormatter) policyFileFormatterConstructor
+        .newInstance();
+    return sentryPolicyFileFormatter;
+  }
+}
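
For illustration, a caller might obtain the configured formatter and export
mapping data like this; the export path is a placeholder, and which formatter
is returned depends on AuthzConfVars.AUTHZ_POLICY_FILE_FORMATTER (defaulting to
the ini formatter, per the factory's javadoc):

    import java.util.Map;
    import java.util.Set;
    import org.apache.sentry.binding.hive.SentryPolicyFileFormatFactory;
    import org.apache.sentry.binding.hive.SentryPolicyFileFormatter;
    import org.apache.sentry.binding.hive.conf.HiveAuthzConf;

    public class ExportPolicySketch {
      static void export(HiveAuthzConf conf,
          Map<String, Map<String, Set<String>>> mappingData) throws Exception {
        SentryPolicyFileFormatter formatter =
            SentryPolicyFileFormatFactory.createFileFormatter(conf);
        formatter.write("/tmp/exported-policy.ini", mappingData);
      }
    }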

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatter.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatter.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatter.java
new file mode 100644
index 0000000..4f465b3
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatter.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.binding.hive;
+
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * SentryPolicyFileFormatter parses and writes files containing Sentry mapping data.
+ */
+public interface SentryPolicyFileFormatter {
+
+  // write the sentry mapping data to file
+  void write(String resourcePath, Map<String, Map<String, Set<String>>> sentryMappingData)
+      throws Exception;
+
+  // parse the sentry mapping data from file
+  Map<String, Map<String, Set<String>>> parse(String resourcePath, Configuration conf)
+      throws Exception;
+
+}


[4/5] sentry git commit: SENTRY-1138: Extract common classes for binding-hive-v1 and binding-hive-v2 (Dapeng Sun, reviewed by Colin Ma)

Posted by sd...@apache.org.
http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzBinding.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzBinding.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzBinding.java
new file mode 100644
index 0000000..0a1d0e8
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzBinding.java
@@ -0,0 +1,407 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive.authz;
+
+import java.lang.reflect.Constructor;
+import java.util.EnumSet;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.sentry.SentryUserException;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
+import org.apache.sentry.binding.hive.conf.InvalidConfigurationException;
+import org.apache.sentry.core.common.ActiveRoleSet;
+import org.apache.sentry.core.common.Subject;
+import org.apache.sentry.core.model.db.AccessConstants;
+import org.apache.sentry.core.model.db.DBModelAction;
+import org.apache.sentry.core.model.db.DBModelAuthorizable;
+import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType;
+import org.apache.sentry.core.model.db.Server;
+import org.apache.sentry.policy.common.PolicyEngine;
+import org.apache.sentry.provider.cache.PrivilegeCache;
+import org.apache.sentry.provider.cache.SimpleCacheProviderBackend;
+import org.apache.sentry.provider.common.AuthorizationProvider;
+import org.apache.sentry.provider.common.ProviderBackend;
+import org.apache.sentry.provider.common.ProviderBackendContext;
+import org.apache.sentry.provider.db.service.thrift.TSentryRole;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Splitter;
+import com.google.common.base.Strings;
+import com.google.common.collect.Sets;
+
+public class HiveAuthzBinding {
+  private static final Logger LOG = LoggerFactory
+      .getLogger(HiveAuthzBinding.class);
+  private static final Splitter ROLE_SET_SPLITTER = Splitter.on(",").trimResults()
+      .omitEmptyStrings();
+  public static final String HIVE_BINDING_TAG = "hive.authz.bindings.tag";
+
+  private final HiveConf hiveConf;
+  private final Server authServer;
+  private final AuthorizationProvider authProvider;
+  private volatile boolean open;
+  private ActiveRoleSet activeRoleSet;
+  private HiveAuthzConf authzConf;
+
+  public static enum HiveHook {
+    HiveServer2,
+    HiveMetaStore
+    ;
+  }
+
+  public HiveAuthzBinding (HiveConf hiveConf, HiveAuthzConf authzConf) throws Exception {
+    this(HiveHook.HiveServer2, hiveConf, authzConf);
+  }
+
+  public HiveAuthzBinding (HiveHook hiveHook, HiveConf hiveConf, HiveAuthzConf authzConf) throws Exception {
+    validateHiveConfig(hiveHook, hiveConf, authzConf);
+    this.hiveConf = hiveConf;
+    this.authzConf = authzConf;
+    this.authServer = new Server(authzConf.get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar()));
+    this.authProvider = getAuthProvider(hiveConf, authzConf, authServer.getName());
+    this.open = true;
+    this.activeRoleSet = parseActiveRoleSet(hiveConf.get(HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET,
+        authzConf.get(HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET, "")).trim());
+  }
+
+  public HiveAuthzBinding (HiveHook hiveHook, HiveConf hiveConf, HiveAuthzConf authzConf,
+      PrivilegeCache privilegeCache) throws Exception {
+    validateHiveConfig(hiveHook, hiveConf, authzConf);
+    this.hiveConf = hiveConf;
+    this.authzConf = authzConf;
+    this.authServer = new Server(authzConf.get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar()));
+    this.authProvider = getAuthProviderWithPrivilegeCache(authzConf, authServer.getName(), privilegeCache);
+    this.open = true;
+    this.activeRoleSet = parseActiveRoleSet(hiveConf.get(HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET,
+            authzConf.get(HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET, "")).trim());
+  }
+
+  private static ActiveRoleSet parseActiveRoleSet(String name)
+      throws SentryUserException {
+    return parseActiveRoleSet(name, null);
+  }
+
+  private static ActiveRoleSet parseActiveRoleSet(String name,
+      Set<TSentryRole> allowedRoles) throws SentryUserException {
+    // if unset, then we choose the default of ALL
+    if (name.isEmpty()) {
+      return ActiveRoleSet.ALL;
+    } else if (AccessConstants.NONE_ROLE.equalsIgnoreCase(name)) {
+      return new ActiveRoleSet(new HashSet<String>());
+    } else if (AccessConstants.ALL_ROLE.equalsIgnoreCase(name)) {
+      return ActiveRoleSet.ALL;
+    } else if (AccessConstants.RESERVED_ROLE_NAMES.contains(name.toUpperCase())) {
+      String msg = "Role " + name + " is reserved";
+      throw new IllegalArgumentException(msg);
+    } else {
+      if (allowedRoles != null) {
+        // check if the user has been granted the role
+        boolean foundRole = false;
+        for (TSentryRole role : allowedRoles) {
+          if (role.getRoleName().equalsIgnoreCase(name)) {
+            foundRole = true;
+            break;
+          }
+        }
+        if (!foundRole) {
+          //Set the reason for hive binding to pick up
+          throw new SentryUserException("Not authorized to set role " + name, "Not authorized to set role " + name);
+
+        }
+      }
+      return new ActiveRoleSet(Sets.newHashSet(ROLE_SET_SPLITTER.split(name)));
+    }
+  }
+
+  private void validateHiveConfig(HiveHook hiveHook, HiveConf hiveConf, HiveAuthzConf authzConf)
+      throws InvalidConfigurationException{
+    if(hiveHook.equals(HiveHook.HiveMetaStore)) {
+      validateHiveMetaStoreConfig(hiveConf, authzConf);
+    } else if (hiveHook.equals(HiveHook.HiveServer2)) {
+      validateHiveServer2Config(hiveConf, authzConf);
+    }
+  }
+
+  private void validateHiveMetaStoreConfig(HiveConf hiveConf, HiveAuthzConf authzConf)
+      throws InvalidConfigurationException{
+    boolean isTestingMode = Boolean.parseBoolean(Strings.nullToEmpty(
+        authzConf.get(AuthzConfVars.SENTRY_TESTING_MODE.getVar())).trim());
+    LOG.debug("Testing mode is " + isTestingMode);
+    if(!isTestingMode) {
+      boolean sasl = hiveConf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL);
+      if(!sasl) {
+        throw new InvalidConfigurationException(
+            ConfVars.METASTORE_USE_THRIFT_SASL + " can't be false in non-testing mode");
+      }
+    } else {
+      boolean setUgi = hiveConf.getBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI);
+      if(!setUgi) {
+        throw new InvalidConfigurationException(
+            ConfVars.METASTORE_EXECUTE_SET_UGI.toString() + " can't be false in non secure mode");
+      }
+    }
+  }
+
+  private void validateHiveServer2Config(HiveConf hiveConf, HiveAuthzConf authzConf)
+      throws InvalidConfigurationException{
+    boolean isTestingMode = Boolean.parseBoolean(Strings.nullToEmpty(
+        authzConf.get(AuthzConfVars.SENTRY_TESTING_MODE.getVar())).trim());
+    LOG.debug("Testing mode is " + isTestingMode);
+    if(!isTestingMode) {
+      String authMethod = Strings.nullToEmpty(hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION)).trim();
+      if("none".equalsIgnoreCase(authMethod)) {
+        throw new InvalidConfigurationException(ConfVars.HIVE_SERVER2_AUTHENTICATION +
+            " can't be none in non-testing mode");
+      }
+      boolean impersonation = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS);
+      boolean allowImpersonation = Boolean.parseBoolean(Strings.nullToEmpty(
+          authzConf.get(AuthzConfVars.AUTHZ_ALLOW_HIVE_IMPERSONATION.getVar())).trim());
+
+      if(impersonation && !allowImpersonation) {
+        LOG.error("Role based authorization does not work with HiveServer2 impersonation");
+        throw new InvalidConfigurationException(ConfVars.HIVE_SERVER2_ENABLE_DOAS +
+            " can't be set to true in non-testing mode");
+      }
+    }
+    String defaultUmask = hiveConf.get(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY);
+    if("077".equalsIgnoreCase(defaultUmask)) {
+      LOG.error("HiveServer2 required a default umask of 077");
+      throw new InvalidConfigurationException(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY +
+          " should be 077 in non-testing mode");
+    }
+  }
+
+  // Instantiate the configured authz provider
+  public static AuthorizationProvider getAuthProvider(HiveConf hiveConf, HiveAuthzConf authzConf,
+        String serverName) throws Exception {
+    // get the provider class and resources from the authz config
+    String authProviderName = authzConf.get(AuthzConfVars.AUTHZ_PROVIDER.getVar());
+    String resourceName =
+        authzConf.get(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar());
+    String providerBackendName = authzConf.get(AuthzConfVars.AUTHZ_PROVIDER_BACKEND.getVar());
+    String policyEngineName = authzConf.get(AuthzConfVars.AUTHZ_POLICY_ENGINE.getVar());
+
+    LOG.debug("Using authorization provider " + authProviderName +
+        " with resource " + resourceName + ", policy engine "
+        + policyEngineName + ", provider backend " + providerBackendName);
+    // load the provider backend class
+    Constructor<?> providerBackendConstructor =
+        Class.forName(providerBackendName).getDeclaredConstructor(Configuration.class, String.class);
+    providerBackendConstructor.setAccessible(true);
+    ProviderBackend providerBackend = (ProviderBackend) providerBackendConstructor.
+        newInstance(new Object[] {authzConf, resourceName});
+
+    // load the policy engine class
+    Constructor<?> policyConstructor =
+      Class.forName(policyEngineName).getDeclaredConstructor(String.class, ProviderBackend.class);
+    policyConstructor.setAccessible(true);
+    PolicyEngine policyEngine = (PolicyEngine) policyConstructor.
+        newInstance(new Object[] {serverName, providerBackend});
+
+    // load the authz provider class
+    Constructor<?> constructor =
+        Class.forName(authProviderName).getDeclaredConstructor(String.class, PolicyEngine.class);
+    constructor.setAccessible(true);
+    return (AuthorizationProvider) constructor.newInstance(new Object[] {resourceName, policyEngine});
+  }
+
+  // Instantiate the authz provider using PrivilegeCache, this method is used for metadata filter function.
+  public static AuthorizationProvider getAuthProviderWithPrivilegeCache(HiveAuthzConf authzConf,
+      String serverName, PrivilegeCache privilegeCache) throws Exception {
+    // get the provider class and resources from the authz config
+    String authProviderName = authzConf.get(AuthzConfVars.AUTHZ_PROVIDER.getVar());
+    String resourceName =
+            authzConf.get(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar());
+    String policyEngineName = authzConf.get(AuthzConfVars.AUTHZ_POLICY_ENGINE.getVar());
+
+    LOG.debug("Using authorization provider " + authProviderName +
+            " with resource " + resourceName + ", policy engine "
+            + policyEngineName + ", provider backend SimpleCacheProviderBackend");
+
+    ProviderBackend providerBackend = new SimpleCacheProviderBackend(authzConf, resourceName);
+    ProviderBackendContext context = new ProviderBackendContext();
+    context.setBindingHandle(privilegeCache);
+    providerBackend.initialize(context);
+
+    // load the policy engine class
+    Constructor<?> policyConstructor =
+            Class.forName(policyEngineName).getDeclaredConstructor(String.class, ProviderBackend.class);
+    policyConstructor.setAccessible(true);
+    PolicyEngine policyEngine = (PolicyEngine) policyConstructor.
+            newInstance(new Object[] {serverName, providerBackend});
+
+    // load the authz provider class
+    Constructor<?> constructor =
+            Class.forName(authProviderName).getDeclaredConstructor(String.class, PolicyEngine.class);
+    constructor.setAccessible(true);
+    return (AuthorizationProvider) constructor.newInstance(new Object[] {resourceName, policyEngine});
+  }
+
+
+  /**
+   * Validate the privilege for the given operation for the given subject
+   * @param hiveOp
+   * @param stmtAuthPrivileges
+   * @param subject
+   * @param inputHierarchyList
+   * @param outputHierarchyList
+   * @throws AuthorizationException
+   */
+  public void authorize(HiveOperation hiveOp, HiveAuthzPrivileges stmtAuthPrivileges,
+      Subject subject, List<List<DBModelAuthorizable>> inputHierarchyList,
+      List<List<DBModelAuthorizable>> outputHierarchyList)
+          throws AuthorizationException {
+    if (!open) {
+      throw new IllegalStateException("Binding has been closed");
+    }
+    boolean isDebug = LOG.isDebugEnabled();
+    if(isDebug) {
+      LOG.debug("Going to authorize statement " + hiveOp.name() +
+          " for subject " + subject.getName());
+    }
+
+    /* For each read and write entity captured by the compiler -
+     *    check if that object type is part of the input/output privilege list.
+     *    If it is, then validate the access.
+     * Note the Hive compiler gathers information on additional entities like partitions,
+     * etc. which are not of interest at this point. Hence it's very
+     * much possible that we won't be validating all the entities in the given list.
+     */
+
+    // Check read entities
+    Map<AuthorizableType, EnumSet<DBModelAction>> requiredInputPrivileges =
+        stmtAuthPrivileges.getInputPrivileges();
+    if(isDebug) {
+      LOG.debug("requiredInputPrivileges = " + requiredInputPrivileges);
+      LOG.debug("inputHierarchyList = " + inputHierarchyList);
+    }
+    Map<AuthorizableType, EnumSet<DBModelAction>> requiredOutputPrivileges =
+        stmtAuthPrivileges.getOutputPrivileges();
+    if(isDebug) {
+      LOG.debug("requiredOuputPrivileges = " + requiredOutputPrivileges);
+      LOG.debug("outputHierarchyList = " + outputHierarchyList);
+    }
+
+    boolean found = false;
+    for (Map.Entry<AuthorizableType, EnumSet<DBModelAction>> entry : requiredInputPrivileges.entrySet()) {
+      AuthorizableType key = entry.getKey();
+      for (List<DBModelAuthorizable> inputHierarchy : inputHierarchyList) {
+        if (getAuthzType(inputHierarchy).equals(key)) {
+          found = true;
+          if (!authProvider.hasAccess(subject, inputHierarchy, entry.getValue(), activeRoleSet)) {
+            throw new AuthorizationException("User " + subject.getName() +
+                " does not have privileges for " + hiveOp.name());
+          }
+        }
+      }
+      if (!found && !key.equals(AuthorizableType.URI) && !(hiveOp.equals(HiveOperation.QUERY))
+          && !(hiveOp.equals(HiveOperation.CREATETABLE_AS_SELECT))) {
+        // URI privileges are optional for some privileges: anyPrivilege, tableDDLAndOptionalUriPrivilege.
+        // QUERY can mean select/insert/analyze, each of which has different required privileges.
+        // CREATETABLE_AS_SELECT can carry table/column privileges with select.
+        // For these alone we skip the check if there is no equivalent input privilege.
+        // TODO: Even this case should be handled to make sure we do not skip the privilege
+        // check if we did not build the input privileges correctly.
+        throw new AuthorizationException("Required privilege (" + key.name() + ") not available in input privileges");
+      }
+      found = false;
+    }
+
+    for(AuthorizableType key: requiredOutputPrivileges.keySet()) {
+      for (List<DBModelAuthorizable> outputHierarchy : outputHierarchyList) {
+        if (getAuthzType(outputHierarchy).equals(key)) {
+          found = true;
+          if (!authProvider.hasAccess(subject, outputHierarchy, requiredOutputPrivileges.get(key), activeRoleSet)) {
+            throw new AuthorizationException("User " + subject.getName() +
+                " does not have privileges for " + hiveOp.name());
+          }
+        }
+      }
+      if (!found && !key.equals(AuthorizableType.URI) && !hiveOp.equals(HiveOperation.QUERY)) {
+        // URI privileges are optional for some privileges: tableInsertPrivilege.
+        // QUERY can mean select/insert/analyze, each of which has different required privileges.
+        // For these alone we skip the check if there is no equivalent output privilege.
+        // TODO: Even this case should be handled to make sure we do not skip the privilege
+        // check if we did not build the output privileges correctly.
+        throw new AuthorizationException("Required privilege (" + key.name() + ") not available in output privileges");
+      }
+      found = false;
+    }
+
+  }
+
+  public void setActiveRoleSet(String activeRoleSet,
+      Set<TSentryRole> allowedRoles) throws SentryUserException {
+    this.activeRoleSet = parseActiveRoleSet(activeRoleSet, allowedRoles);
+    hiveConf.set(HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET, activeRoleSet);
+  }
+
+  public ActiveRoleSet getActiveRoleSet() {
+    return activeRoleSet;
+  }
+
+  public Set<String> getGroups(Subject subject) {
+    return authProvider.getGroupMapping().getGroups(subject.getName());
+  }
+
+  public Server getAuthServer() {
+    if (!open) {
+      throw new IllegalStateException("Binding has been closed");
+    }
+    return authServer;
+  }
+
+  public HiveAuthzConf getAuthzConf() {
+    return authzConf;
+  }
+
+  public HiveConf getHiveConf() {
+    return hiveConf;
+  }
+
+  private AuthorizableType getAuthzType(List<DBModelAuthorizable> hierarchy) {
+    return hierarchy.get(hierarchy.size() - 1).getAuthzType();
+  }
+
+  public List<String> getLastQueryPrivilegeErrors() {
+    if (!open) {
+      throw new IllegalStateException("Binding has been closed");
+    }
+    return authProvider.getLastFailedPrivileges();
+  }
+
+  public void close() {
+    authProvider.close();
+  }
+
+  public AuthorizationProvider getCurrentAuthProvider() {
+    return authProvider;
+  }
+}

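For reference, a minimal sketch of how a caller drives the authorize() method
above. This is not part of the patch: the HiveAuthzBinding(HiveConf,
HiveAuthzConf) constructor, the single-argument Database/Table constructors,
and the AuthorizableType.Table constant are assumed from the wider codebase;
imports are elided; "db1", "t1" and "alice" are placeholder names.

    // Hedged sketch, not taken from this commit.
    void checkSelect(HiveConf hiveConf, HiveAuthzConf authzConf) throws Exception {
      HiveAuthzBinding binding = new HiveAuthzBinding(hiveConf, authzConf);  // assumed constructor

      // A QUERY at TABLE scope that requires SELECT on each input table.
      HiveAuthzPrivileges queryPrivileges = new HiveAuthzPrivileges.AuthzPrivilegeBuilder()
          .addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.SELECT))
          .setOperationScope(HiveAuthzPrivileges.HiveOperationScope.TABLE)
          .setOperationType(HiveAuthzPrivileges.HiveOperationType.QUERY)
          .build();

      // One hierarchy per read entity: server -> database -> table.
      List<DBModelAuthorizable> input = Lists.<DBModelAuthorizable>newArrayList(
          binding.getAuthServer(), new Database("db1"), new Table("t1"));

      // Throws AuthorizationException if the subject lacks a required privilege.
      binding.authorize(HiveOperation.QUERY, queryPrivileges, new Subject("alice"),
          Collections.singletonList(input),
          Collections.<List<DBModelAuthorizable>>emptyList());
    }
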
http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java
new file mode 100644
index 0000000..f164b30
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java
@@ -0,0 +1,153 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive.authz;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.sentry.core.model.db.DBModelAction;
+import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType;
+
+/**
+ * Hive objects with required access privileges mapped to auth provider privileges
+ */
+public class HiveAuthzPrivileges {
+
+  /**
+   * Operation type used for privilege granting
+   */
+  public static enum HiveOperationType {
+    UNKNOWN,
+    DDL,
+    DML,
+    DATA_LOAD,
+    DATA_UNLOAD,
+    QUERY,
+    INFO
+  };
+
+  /**
+   * Scope of the operation. The auth provider interface has different methods
+   * for some of these. Hence we want to be able to identify the auth scope of
+   * a statement, e.g. server level or DB level.
+   */
+  public static enum HiveOperationScope {
+    UNKNOWN,
+    SERVER,
+    DATABASE,
+    TABLE,
+    FUNCTION,
+    CONNECT,
+    COLUMN
+  }
+
+  public static enum HiveExtendedOperation {
+    TRANSFORM,
+    RESOURCE
+  }
+
+  public static class AuthzPrivilegeBuilder {
+    private final Map<AuthorizableType, EnumSet<DBModelAction>> inputPrivileges =
+        new HashMap<AuthorizableType ,EnumSet<DBModelAction>>();
+    private final Map<AuthorizableType,EnumSet<DBModelAction>> outputPrivileges =
+        new HashMap<AuthorizableType,EnumSet<DBModelAction>>();
+    private HiveOperationType operationType;
+    private HiveOperationScope operationScope;
+
+    public AuthzPrivilegeBuilder addInputObjectPriviledge(AuthorizableType inputObjectType, EnumSet<DBModelAction> inputPrivilege) {
+      inputPrivileges.put(inputObjectType, inputPrivilege);
+      return this;
+    }
+
+    public AuthzPrivilegeBuilder addOutputEntityPriviledge(AuthorizableType outputEntityType, EnumSet<DBModelAction> outputPrivilege) {
+      outputPrivileges.put(outputEntityType, outputPrivilege);
+      return this;
+    }
+
+    public AuthzPrivilegeBuilder addOutputObjectPriviledge(AuthorizableType outputObjectType, EnumSet<DBModelAction> outputPrivilege) {
+      outputPrivileges.put(outputObjectType, outputPrivilege);
+      return this;
+    }
+
+    public AuthzPrivilegeBuilder setOperationType(HiveOperationType operationType) {
+      this.operationType = operationType;
+      return this;
+    }
+
+    public AuthzPrivilegeBuilder setOperationScope(HiveOperationScope operationScope) {
+      this.operationScope = operationScope;
+      return this;
+    }
+
+    public HiveAuthzPrivileges build() {
+      if (operationScope.equals(HiveOperationScope.UNKNOWN)) {
+        throw new UnsupportedOperationException("Operation scope is not set");
+      }
+
+      if (operationType.equals(HiveOperationType.UNKNOWN)) {
+        throw new UnsupportedOperationException("Operation type is not set");
+      }
+
+      return new HiveAuthzPrivileges(inputPrivileges, outputPrivileges, operationType, operationScope);
+    }
+  }
+
+  private final Map<AuthorizableType,EnumSet<DBModelAction>> inputPrivileges =
+      new HashMap<AuthorizableType,EnumSet<DBModelAction>>();
+  private final Map<AuthorizableType,EnumSet<DBModelAction>>  outputPrivileges =
+      new HashMap<AuthorizableType,EnumSet<DBModelAction>>();
+  private final HiveOperationType operationType;
+  private final HiveOperationScope operationScope;
+
+  protected HiveAuthzPrivileges(Map<AuthorizableType,EnumSet<DBModelAction>> inputPrivileges,
+      Map<AuthorizableType,EnumSet<DBModelAction>> outputPrivileges, HiveOperationType operationType,
+      HiveOperationScope operationScope) {
+    this.inputPrivileges.putAll(inputPrivileges);
+    this.outputPrivileges.putAll(outputPrivileges);
+    this.operationScope = operationScope;
+    this.operationType = operationType;
+  }
+
+  /**
+   * @return the inputPrivileges
+   */
+  public Map<AuthorizableType, EnumSet<DBModelAction>> getInputPrivileges() {
+    return inputPrivileges;
+  }
+
+  /**
+   * @return the outputPrivileges
+   */
+  public Map<AuthorizableType, EnumSet<DBModelAction>> getOutputPrivileges() {
+    return outputPrivileges;
+  }
+
+  /**
+   * @return the operationType
+   */
+  public HiveOperationType getOperationType() {
+    return operationType;
+  }
+
+  /**
+   * @return the operationScope
+   */
+  public HiveOperationScope getOperationScope() {
+    return operationScope;
+  }
+}

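To illustrate the builder just added, a hedged sketch of how a DML statement
that both reads and writes tables could be described; the specific operation
and action choices are illustrative, not taken from this commit (imports
elided).

    // Hedged sketch: an INSERT ... SELECT style statement needs SELECT on its
    // input tables and INSERT on its output table.
    HiveAuthzPrivileges dmlPrivileges = new HiveAuthzPrivileges.AuthzPrivilegeBuilder()
        .addInputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.SELECT))
        .addOutputObjectPriviledge(AuthorizableType.Table, EnumSet.of(DBModelAction.INSERT))
        .setOperationScope(HiveAuthzPrivileges.HiveOperationScope.TABLE)
        .setOperationType(HiveAuthzPrivileges.HiveOperationType.DML)
        .build();

HiveAuthzBinding.authorize() above walks getInputPrivileges() and
getOutputPrivileges() against the compiler's input and output hierarchies, so
a statement is rejected as soon as one required action is missing.
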
http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java
new file mode 100644
index 0000000..73b0941
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/SentryConfigTool.java
@@ -0,0 +1,622 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.binding.hive.authz;
+
+import java.security.CodeSource;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionGroup;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.cli.Parser;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.log4j.Level;
+import org.apache.log4j.LogManager;
+import org.apache.sentry.Command;
+import org.apache.sentry.binding.hive.HiveAuthzBindingHookBase;
+import org.apache.sentry.binding.hive.SentryPolicyFileFormatFactory;
+import org.apache.sentry.binding.hive.SentryPolicyFileFormatter;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
+import org.apache.sentry.core.common.SentryConfigurationException;
+import org.apache.sentry.core.common.Subject;
+import org.apache.sentry.core.model.db.Server;
+import org.apache.sentry.provider.common.AuthorizationProvider;
+import org.apache.sentry.provider.db.service.thrift.SentryPolicyServiceClient;
+import org.apache.sentry.service.thrift.SentryServiceClientFactory;
+
+/**
+ * Command line tool for verifying the Sentry configuration: it can validate
+ * the sentry-site and policy files, list privileges for a given user, verify
+ * a query locally or against a remote HiveServer2, and import or export the
+ * Sentry policy data.
+ */
+public class SentryConfigTool {
+  private String sentrySiteFile = null;
+  private String policyFile = null;
+  private String query = null;
+  private String jdbcURL = null;
+  private String user = null;
+  private String passWord = null;
+  private String importPolicyFilePath = null;
+  private String exportPolicyFilePath = null;
+  private boolean listPrivs = false;
+  private boolean validate = false;
+  private boolean importOverwriteRole = false;
+  private HiveConf hiveConf = null;
+  private HiveAuthzConf authzConf = null;
+  private AuthorizationProvider sentryProvider = null;
+
+  public SentryConfigTool() {
+
+  }
+
+  public AuthorizationProvider getSentryProvider() {
+    return sentryProvider;
+  }
+
+  public void setSentryProvider(AuthorizationProvider sentryProvider) {
+    this.sentryProvider = sentryProvider;
+  }
+
+  public HiveConf getHiveConf() {
+    return hiveConf;
+  }
+
+  public void setHiveConf(HiveConf hiveConf) {
+    this.hiveConf = hiveConf;
+  }
+
+  public HiveAuthzConf getAuthzConf() {
+    return authzConf;
+  }
+
+  public void setAuthzConf(HiveAuthzConf authzConf) {
+    this.authzConf = authzConf;
+  }
+
+  public boolean isValidate() {
+    return validate;
+  }
+
+  public void setValidate(boolean validate) {
+    this.validate = validate;
+  }
+
+  public String getImportPolicyFilePath() {
+    return importPolicyFilePath;
+  }
+
+  public void setImportPolicyFilePath(String importPolicyFilePath) {
+    this.importPolicyFilePath = importPolicyFilePath;
+  }
+
+  public String getExportPolicyFilePath() {
+    return exportPolicyFilePath;
+  }
+
+  public void setExportPolicyFilePath(String exportPolicyFilePath) {
+    this.exportPolicyFilePath = exportPolicyFilePath;
+  }
+
+  public String getSentrySiteFile() {
+    return sentrySiteFile;
+  }
+
+  public void setSentrySiteFile(String sentrySiteFile) {
+    this.sentrySiteFile = sentrySiteFile;
+  }
+
+  public String getPolicyFile() {
+    return policyFile;
+  }
+
+  public void setPolicyFile(String policyFile) {
+    this.policyFile = policyFile;
+  }
+
+  public String getQuery() {
+    return query;
+  }
+
+  public void setQuery(String query) {
+    this.query = query;
+  }
+
+  public String getJdbcURL() {
+    return jdbcURL;
+  }
+
+  public void setJdbcURL(String jdbcURL) {
+    this.jdbcURL = jdbcURL;
+  }
+
+  public String getUser() {
+    return user;
+  }
+
+  public void setUser(String user) {
+    this.user = user;
+  }
+
+  public String getPassWord() {
+    return passWord;
+  }
+
+  public void setPassWord(String passWord) {
+    this.passWord = passWord;
+  }
+
+  public boolean isListPrivs() {
+    return listPrivs;
+  }
+
+  public void setListPrivs(boolean listPrivs) {
+    this.listPrivs = listPrivs;
+  }
+
+  public boolean isImportOverwriteRole() {
+    return importOverwriteRole;
+  }
+
+  public void setImportOverwriteRole(boolean importOverwriteRole) {
+    this.importOverwriteRole = importOverwriteRole;
+  }
+
+  /**
+   * Set the required system properties to be read by HiveConf and AuthzConf.
+   * @throws Exception
+   */
+  public void setupConfig() throws Exception {
+    System.out.println("Configuration: ");
+    CodeSource src = SentryConfigTool.class.getProtectionDomain()
+        .getCodeSource();
+    if (src != null) {
+      System.out.println("Sentry package jar: " + src.getLocation());
+    }
+
+    if (getPolicyFile() != null) {
+      System.setProperty(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar(),
+          getPolicyFile());
+    }
+    System.setProperty(AuthzConfVars.SENTRY_TESTING_MODE.getVar(), "true");
+    setHiveConf(new HiveConf(SessionState.class));
+    getHiveConf().setVar(ConfVars.SEMANTIC_ANALYZER_HOOK,
+        HiveAuthzBindingHookBase.class.getName());
+    try {
+      System.out.println("Hive config: " + HiveConf.getHiveSiteLocation());
+    } catch (NullPointerException e) {
+      // Hack, hiveConf doesn't provide a reliable way check if it found a valid
+      // hive-site
+      throw new SentryConfigurationException("Didn't find a hive-site.xml");
+
+    }
+
+    if (getSentrySiteFile() != null) {
+      getHiveConf()
+          .set(HiveAuthzConf.HIVE_SENTRY_CONF_URL, getSentrySiteFile());
+    }
+
+    setAuthzConf(HiveAuthzConf.getAuthzConf(getHiveConf()));
+    System.out.println("Sentry config: "
+        + getAuthzConf().getHiveAuthzSiteFile());
+    System.out.println("Sentry Policy: "
+        + getAuthzConf().get(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar()));
+    System.out.println("Sentry server: "
+        + getAuthzConf().get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar()));
+
+    setSentryProvider(getAuthorizationProvider());
+  }
+
+  // load auth provider
+  private AuthorizationProvider getAuthorizationProvider()
+      throws IllegalStateException, SentryConfigurationException {
+    String serverName = new Server(getAuthzConf().get(
+        AuthzConfVars.AUTHZ_SERVER_NAME.getVar())).getName();
+    // get the configured sentry provider
+    AuthorizationProvider sentryProvider = null;
+    try {
+      sentryProvider = HiveAuthzBinding.getAuthProvider(getHiveConf(),
+          authzConf, serverName);
+    } catch (SentryConfigurationException eC) {
+      printConfigErrors(eC);
+    } catch (Exception e) {
+      throw new IllegalStateException("Couldn't load sentry provider ", e);
+    }
+    return sentryProvider;
+  }
+
+  // validate policy files
+  public void validatePolicy() throws Exception {
+    try {
+      getSentryProvider().validateResource(true);
+    } catch (SentryConfigurationException e) {
+      printConfigErrors(e);
+    }
+    System.out.println("No errors found in the policy file");
+  }
+
+  // import the sentry mapping data to database
+  public void importPolicy() throws Exception {
+    String requestorUserName = System.getProperty("user.name", "");
+    // get the FileFormatter according to the configuration
+    SentryPolicyFileFormatter sentryPolicyFileFormatter = SentryPolicyFileFormatFactory
+        .createFileFormatter(authzConf);
+    // parse the input file, get the mapping data in map structure
+    Map<String, Map<String, Set<String>>> policyFileMappingData = sentryPolicyFileFormatter.parse(
+        importPolicyFilePath, authzConf);
+    // TODO: a validator should check the data's values, format, and hierarchy here
+    SentryPolicyServiceClient client = SentryServiceClientFactory.create(getAuthzConf());
+    // import the mapping data to database
+    client.importPolicy(policyFileMappingData, requestorUserName, importOverwriteRole);
+  }
+
+  // export the sentry mapping data to file
+  public void exportPolicy() throws Exception {
+    String requestorUserName = System.getProperty("user.name", "");
+    SentryPolicyServiceClient client = SentryServiceClientFactory.create(getAuthzConf());
+    // export the sentry mapping data from database to map structure
+    Map<String, Map<String, Set<String>>> policyFileMappingData = client
+        .exportPolicy(requestorUserName);
+    // get the FileFormatter according to the configuration
+    SentryPolicyFileFormatter sentryPolicyFileFormatter = SentryPolicyFileFormatFactory
+        .createFileFormatter(authzConf);
+    // write the sentry mapping data to exportPolicyFilePath with the data in map structure
+    sentryPolicyFileFormatter.write(exportPolicyFilePath, policyFileMappingData);
+  }
+
+  // list permissions for given user
+  public void listPrivs() throws Exception {
+    getSentryProvider().validateResource(true);
+    System.out.println("Available privileges for user " + getUser() + ":");
+    Set<String> permList = getSentryProvider().listPrivilegesForSubject(
+        new Subject(getUser()));
+    for (String perms : permList) {
+      System.out.println("\t" + perms);
+    }
+    if (permList.isEmpty()) {
+      System.out.println("\t*** No permissions available ***");
+    }
+  }
+
+  // Verify the given query
+  public void verifyLocalQuery(String queryStr) throws Exception {
+    // setup Hive driver
+    SessionState session = new SessionState(getHiveConf());
+    SessionState.start(session);
+    Driver driver = new Driver(session.getConf(), getUser());
+
+    // compile the query
+    CommandProcessorResponse compilerStatus = driver
+        .compileAndRespond(queryStr);
+    if (compilerStatus.getResponseCode() != 0) {
+      String errMsg = compilerStatus.getErrorMessage();
+      if (errMsg.contains(HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE)) {
+        printMissingPerms(getHiveConf().get(
+            HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS));
+      }
+      throw new SemanticException("Compilation error: "
+          + compilerStatus.getErrorMessage());
+    }
+    driver.close();
+    System.out
+        .println("User " + getUser() + " has privileges to run the query");
+  }
+
+  // connect to remote HS2 and run mock query
+  public void verifyRemoteQuery(String queryStr) throws Exception {
+    Class.forName("org.apache.hive.jdbc.HiveDriver");
+    Connection conn = DriverManager.getConnection(getJdbcURL(), getUser(),
+        getPassWord());
+    Statement stmt = conn.createStatement();
+    if (!isSentryEnabledOnHiveServer(stmt)) {
+      throw new IllegalStateException("Sentry is not enabled on HiveServer2");
+    }
+    stmt.execute("set " + HiveAuthzConf.HIVE_SENTRY_MOCK_COMPILATION + "=true");
+    try {
+      stmt.execute(queryStr);
+    } catch (SQLException e) {
+      String errMsg = e.getMessage();
+      if (errMsg.contains(HiveAuthzConf.HIVE_SENTRY_MOCK_ERROR)) {
+        System.out.println("User "
+            + readConfig(stmt, HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME)
+            + " has privileges to run the query");
+        return;
+      } else if (errMsg
+          .contains(HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE)) {
+        printMissingPerms(readConfig(stmt,
+            HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS));
+        throw e;
+      } else {
+        throw e;
+      }
+    } finally {
+      if (!stmt.isClosed()) {
+        stmt.close();
+      }
+      conn.close();
+    }
+
+  }
+
+  // verify the sentry session hook is set
+  private boolean isSentryEnabledOnHiveServer(Statement stmt)
+      throws SQLException {
+    String bindingString = readConfig(stmt, HiveConf.ConfVars.HIVE_SERVER2_SESSION_HOOK.varname).toUpperCase();
+    return bindingString.contains("org.apache.sentry.binding.hive".toUpperCase())
+        && bindingString.contains("HiveAuthzBindingSessionHook".toUpperCase());
+  }
+
+  // read a config value using 'set' statement
+  private String readConfig(Statement stmt, String configKey)
+      throws SQLException {
+    ResultSet res = stmt.executeQuery("set " + configKey);
+    if (!res.next()) {
+      return null;
+    }
+    // parse key=value result format
+    String result = res.getString(1);
+    res.close();
+    return result.substring(result.indexOf("=") + 1);
+  }
+
+  // print configuration/policy file errors and warnings
+  private void printConfigErrors(SentryConfigurationException configException)
+      throws SentryConfigurationException {
+    System.out.println(" *** Found configuration problems *** ");
+    for (String errMsg : configException.getConfigErrors()) {
+      System.out.println("ERROR: " + errMsg);
+    }
+    for (String warnMsg : configException.getConfigWarnings()) {
+      System.out.println("Warning: " + warnMsg);
+    }
+    throw configException;
+  }
+
+  // extract the authorization errors from config property and print
+  private void printMissingPerms(String errMsg) {
+    if (errMsg == null || errMsg.isEmpty()) {
+      return;
+    }
+    System.out.println("*** Query compilation failed ***");
+    String[] perms = errMsg.replaceFirst(
+        ".*" + HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE, "")
+        .split(";");
+    System.out.println("Required privileges for given query:");
+    for (String perm : perms) {
+      System.out.println(" \t " + perm);
+    }
+  }
+
+  // print usage
+  private void usage(Options sentryOptions) {
+    HelpFormatter formatter = new HelpFormatter();
+    formatter.printHelp("sentry --command config-tool", sentryOptions);
+    System.exit(-1);
+  }
+
+  /**
+   * parse arguments
+   *
+   * <pre>
+   *   -d,--debug                  Enable debug output
+   *   -e,--query <arg>            Query privilege verification, requires -u
+   *   -h,--help                   Print usage
+   *   -i,--policyIni <arg>        Policy file path
+   *   -j,--jdbcURL <arg>          JDBC URL
+   *   -l,--listPrivs,--listPerms  List privileges for given user, requires -u
+   *   -p,--password <arg>         Password
+   *   -s,--sentry-site <arg>      sentry-site file path
+   *   -u,--user <arg>             user name
+   *   -v,--validate               Validate policy file
+   *   -I,--import                 Import policy file
+   *   -E,--export                 Export policy file
+   *   -o,--overwrite              Overwrite existing role data when importing
+   * </pre>
+   *
+   * @param args
+   */
+  private void parseArgs(String[] args) {
+    boolean enableDebug = false;
+
+    Options sentryOptions = new Options();
+
+    Option helpOpt = new Option("h", "help", false, "Print usage");
+    helpOpt.setRequired(false);
+
+    Option validateOpt = new Option("v", "validate", false,
+        "Validate policy file");
+    validateOpt.setRequired(false);
+
+    Option queryOpt = new Option("e", "query", true,
+        "Query privilege verification, requires -u");
+    queryOpt.setRequired(false);
+
+    Option listPermsOpt = new Option("l", "listPerms", false,
+        "list permissions for given user, requires -u");
+    listPermsOpt.setRequired(false);
+    Option listPrivsOpt = new Option("listPrivs", false,
+        "list privileges for given user, requires -u");
+    listPrivsOpt.setRequired(false);
+
+    Option importOpt = new Option("I", "import", true,
+        "Import policy file");
+    importOpt.setRequired(false);
+
+    Option exportOpt = new Option("E", "export", true, "Export policy file");
+    exportOpt.setRequired(false);
+    // required args
+    OptionGroup sentryOptGroup = new OptionGroup();
+    sentryOptGroup.addOption(helpOpt);
+    sentryOptGroup.addOption(validateOpt);
+    sentryOptGroup.addOption(queryOpt);
+    sentryOptGroup.addOption(listPermsOpt);
+    sentryOptGroup.addOption(listPrivsOpt);
+    sentryOptGroup.addOption(importOpt);
+    sentryOptGroup.addOption(exportOpt);
+    sentryOptGroup.setRequired(true);
+    sentryOptions.addOptionGroup(sentryOptGroup);
+
+    // optional args
+    Option jdbcArg = new Option("j", "jdbcURL", true, "JDBC URL");
+    jdbcArg.setRequired(false);
+    sentryOptions.addOption(jdbcArg);
+
+    Option sentrySitePath = new Option("s", "sentry-site", true,
+        "sentry-site file path");
+    sentrySitePath.setRequired(false);
+    sentryOptions.addOption(sentrySitePath);
+
+    Option globalPolicyPath = new Option("i", "policyIni", true,
+        "Policy file path");
+    globalPolicyPath.setRequired(false);
+    sentryOptions.addOption(globalPolicyPath);
+
+    Option userOpt = new Option("u", "user", true, "user name");
+    userOpt.setRequired(false);
+    sentryOptions.addOption(userOpt);
+
+    Option passWordOpt = new Option("p", "password", true, "Password");
+    passWordOpt.setRequired(false);
+    sentryOptions.addOption(passWordOpt);
+
+    Option debugOpt = new Option("d", "debug", false, "enable debug output");
+    debugOpt.setRequired(false);
+    sentryOptions.addOption(debugOpt);
+
+    Option overwriteOpt = new Option("o", "overwrite", false, "enable import overwrite");
+    overwriteOpt.setRequired(false);
+    sentryOptions.addOption(overwriteOpt);
+
+    try {
+      Parser parser = new GnuParser();
+      CommandLine cmd = parser.parse(sentryOptions, args);
+
+      for (Option opt : cmd.getOptions()) {
+        if (opt.getOpt().equals("s")) {
+          setSentrySiteFile(opt.getValue());
+        } else if (opt.getOpt().equals("i")) {
+          setPolicyFile(opt.getValue());
+        } else if (opt.getOpt().equals("e")) {
+          setQuery(opt.getValue());
+        } else if (opt.getOpt().equals("j")) {
+          setJdbcURL(opt.getValue());
+        } else if (opt.getOpt().equals("u")) {
+          setUser(opt.getValue());
+        } else if (opt.getOpt().equals("p")) {
+          setPassWord(opt.getValue());
+        } else if (opt.getOpt().equals("l") || opt.getOpt().equals("listPrivs")) {
+          setListPrivs(true);
+        } else if (opt.getOpt().equals("v")) {
+          setValidate(true);
+        } else if (opt.getOpt().equals("I")) {
+          setImportPolicyFilePath(opt.getValue());
+        } else if (opt.getOpt().equals("E")) {
+          setExportPolicyFilePath(opt.getValue());
+        } else if (opt.getOpt().equals("h")) {
+          usage(sentryOptions);
+        } else if (opt.getOpt().equals("d")) {
+          enableDebug = true;
+        } else if (opt.getOpt().equals("o")) {
+          setImportOverwriteRole(true);
+        }
+      }
+
+      if (isListPrivs() && getUser() == null) {
+        throw new ParseException("Can't use -l without -u ");
+      }
+      if (getQuery() != null && getUser() == null) {
+        throw new ParseException("Must use -u with -e ");
+      }
+    } catch (ParseException e1) {
+      usage(sentryOptions);
+    }
+
+    if (!enableDebug) {
+      // turn off log
+      LogManager.getRootLogger().setLevel(Level.OFF);
+    }
+  }
+
+  public static class CommandImpl implements Command {
+    @Override
+    public void run(String[] args) throws Exception {
+      SentryConfigTool sentryTool = new SentryConfigTool();
+
+      try {
+        // parse arguments
+        sentryTool.parseArgs(args);
+
+        // load configuration
+        sentryTool.setupConfig();
+
+        // validate configuration
+        if (sentryTool.isValidate()) {
+          sentryTool.validatePolicy();
+        }
+
+        if (!StringUtils.isEmpty(sentryTool.getImportPolicyFilePath())) {
+          sentryTool.importPolicy();
+        }
+
+        if (!StringUtils.isEmpty(sentryTool.getExportPolicyFilePath())) {
+          sentryTool.exportPolicy();
+        }
+
+        // list permissions for given user
+        if (sentryTool.isListPrivs()) {
+          sentryTool.listPrivs();
+        }
+
+        // verify given query
+        if (sentryTool.getQuery() != null) {
+          if (sentryTool.getJdbcURL() != null) {
+            sentryTool.verifyRemoteQuery(sentryTool.getQuery());
+          } else {
+            sentryTool.verifyLocalQuery(sentryTool.getQuery());
+          }
+        }
+      } catch (Exception e) {
+        System.out.println("Sentry tool reported Errors: " + e.getMessage());
+        e.printStackTrace(System.out);
+        System.exit(1);
+      }
+    }
+  }
+}

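As a usage sketch for the tool above, driven through its Command entry point;
the file paths are hypothetical, the option letters come from the parseArgs()
javadoc, and a valid hive-site.xml must also be on the classpath, as
setupConfig() requires.

    // Hedged sketch: validates a policy file against a given sentry-site.
    public class ConfigToolExample {
      public static void main(String[] args) throws Exception {
        new SentryConfigTool.CommandImpl().run(new String[] {
            "-s", "/etc/sentry/conf/sentry-site.xml",   // hypothetical path
            "-i", "/etc/sentry/conf/sentry-policy.ini", // hypothetical path
            "-v"                                        // validate policy file
        });
      }
    }
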
http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
new file mode 100644
index 0000000..5a89af2
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
@@ -0,0 +1,269 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive.conf;
+
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+public class HiveAuthzConf extends Configuration {
+
+  /**
+   * Configuration key used in hive-site.xml to point at sentry-site.xml
+   */
+  public static final String HIVE_ACCESS_CONF_URL = "hive.access.conf.url";
+  public static final String HIVE_SENTRY_CONF_URL = "hive.sentry.conf.url";
+  public static final String HIVE_ACCESS_SUBJECT_NAME = "hive.access.subject.name";
+  public static final String HIVE_SENTRY_SUBJECT_NAME = "hive.sentry.subject.name";
+  public static final String HIVE_SENTRY_AUTH_ERRORS = "sentry.hive.authorization.errors";
+  public static final String HIVE_SENTRY_MOCK_COMPILATION = "sentry.hive.mock.compilation";
+  public static final String HIVE_SENTRY_MOCK_ERROR = "sentry.hive.mock.error";
+  public static final String HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE = "No valid privileges";
+  /**
+   * Property used to persist the role set in the session. This is not public for now.
+   */
+  public static final String SENTRY_ACTIVE_ROLE_SET = "hive.sentry.active.role.set";
+
+  public static final String HIVE_SENTRY_SECURITY_COMMAND_WHITELIST =
+      "hive.sentry.security.command.whitelist";
+  public static final String HIVE_SENTRY_SECURITY_COMMAND_WHITELIST_DEFAULT =
+      "set,reset,reload";
+
+  public static final String HIVE_SENTRY_SERDE_WHITELIST = "hive.sentry.serde.whitelist";
+  public static final String HIVE_SENTRY_SERDE_WHITELIST_DEFAULT = "org.apache.hadoop.hive.serde2";
+
+  // Disable the serde URI privileges by default for backward compatibility.
+  public static final String HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED = "hive.sentry.turn.on.serde.uri.privileges";
+  public static final boolean HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED_DEFAULT = false;
+
+  public static final String HIVE_UDF_WHITE_LIST =
+      "concat,substr,substring,space,repeat,ascii,lpad,rpad,size,round,floor,sqrt,ceil," +
+          "ceiling,rand,abs,pmod,ln,log2,sin,asin,cos,acos,log10,log,exp,power,pow,sign,pi," +
+          "degrees,radians,atan,tan,e,conv,bin,hex,unhex,base64,unbase64,encode,decode,upper," +
+          "lower,ucase,lcase,trim,ltrim,rtrim,length,reverse,field,find_in_set,initcap,like," +
+          "rlike,regexp,regexp_replace,regexp_extract,parse_url,nvl,split,str_to_map,translate" +
+          ",positive,negative,day,dayofmonth,month,year,hour,minute,second,from_unixtime," +
+          "to_date,weekofyear,last_day,date_add,date_sub,datediff,add_months,get_json_object," +
+          "xpath_string,xpath_boolean,xpath_number,xpath_double,xpath_float,xpath_long," +
+          "xpath_int,xpath_short,xpath,+,-,*,/,%,div,&,|,^,~,current_database,isnull," +
+          "isnotnull,if,in,and,or,=,==,<=>,!=,<>,<,<=,>,>=,not,!,between,ewah_bitmap_and," +
+          "ewah_bitmap_or,ewah_bitmap_empty,boolean,tinyint,smallint,int,bigint,float,double," +
+          "string,date,timestamp,binary,decimal,varchar,char,max,min,sum,count,avg,std,stddev," +
+          "stddev_pop,stddev_samp,variance,var_pop,var_samp,covar_pop,covar_samp,corr," +
+          "histogram_numeric,percentile_approx,collect_set,collect_list,ngrams," +
+          "context_ngrams,ewah_bitmap,compute_stats,percentile," +
+          "array,assert_true,map,struct,named_struct,create_union,case,when,hash,coalesce," +
+          "index,in_file,instr,locate,elt,concat_ws,sort_array," +
+          "array_contains,sentences,map_keys,map_values,format_number,printf,greatest,least," +
+          "from_utc_timestamp,to_utc_timestamp,unix_timestamp,to_unix_timestamp,explode," +
+          "inline,json_tuple,parse_url_tuple,posexplode,stack,lead,lag,row_number,rank," +
+          "dense_rank,percent_rank,cume_dist,ntile,first_value,last_value,noop,noopwithmap," +
+          "noopstreaming,noopwithmapstreaming,windowingtablefunction,matchpath";
+
+  public static final String HIVE_UDF_BLACK_LIST = "reflect,reflect2,java_method";
+
+  /**
+   * Config setting definitions
+   */
+  public static enum AuthzConfVars {
+    AUTHZ_PROVIDER("sentry.provider",
+      "org.apache.sentry.provider.common.HadoopGroupResourceAuthorizationProvider"),
+    AUTHZ_PROVIDER_RESOURCE("sentry.hive.provider.resource", ""),
+    AUTHZ_PROVIDER_BACKEND("sentry.hive.provider.backend", "org.apache.sentry.provider.file.SimpleFileProviderBackend"),
+    AUTHZ_POLICY_ENGINE("sentry.hive.policy.engine", "org.apache.sentry.policy.db.SimpleDBPolicyEngine"),
+    AUTHZ_POLICY_FILE_FORMATTER(
+        "sentry.hive.policy.file.formatter",
+        "org.apache.sentry.binding.hive.SentryIniPolicyFileFormatter"),
+    AUTHZ_SERVER_NAME("sentry.hive.server", ""),
+    AUTHZ_RESTRICT_DEFAULT_DB("sentry.hive.restrict.defaultDB", "false"),
+    SENTRY_TESTING_MODE("sentry.hive.testing.mode", "false"),
+    AUTHZ_ALLOW_HIVE_IMPERSONATION("sentry.hive.allow.hive.impersonation", "false"),
+    AUTHZ_ONFAILURE_HOOKS("sentry.hive.failure.hooks", ""),
+    AUTHZ_METASTORE_SERVICE_USERS("sentry.metastore.service.users", null),
+    AUTHZ_SYNC_ALTER_WITH_POLICY_STORE("sentry.hive.sync.alter", "true"),
+    AUTHZ_SYNC_CREATE_WITH_POLICY_STORE("sentry.hive.sync.create", "false"),
+    AUTHZ_SYNC_DROP_WITH_POLICY_STORE("sentry.hive.sync.drop", "true"),
+
+    AUTHZ_PROVIDER_DEPRECATED("hive.sentry.provider",
+      "org.apache.sentry.provider.file.ResourceAuthorizationProvider"),
+    AUTHZ_PROVIDER_RESOURCE_DEPRECATED("hive.sentry.provider.resource", ""),
+    AUTHZ_SERVER_NAME_DEPRECATED("hive.sentry.server", ""),
+    AUTHZ_RESTRICT_DEFAULT_DB_DEPRECATED("hive.sentry.restrict.defaultDB", "false"),
+    SENTRY_TESTING_MODE_DEPRECATED("hive.sentry.testing.mode", "false"),
+    AUTHZ_ALLOW_HIVE_IMPERSONATION_DEPRECATED("hive.sentry.allow.hive.impersonation", "false"),
+    AUTHZ_ONFAILURE_HOOKS_DEPRECATED("hive.sentry.failure.hooks", "");
+
+    private final String varName;
+    private final String defaultVal;
+
+    AuthzConfVars(String varName, String defaultVal) {
+      this.varName = varName;
+      this.defaultVal = defaultVal;
+    }
+
+    public String getVar() {
+      return varName;
+    }
+
+    public String getDefault() {
+      return defaultVal;
+    }
+
+    public static String getDefault(String varName) {
+      for (AuthzConfVars oneVar : AuthzConfVars.values()) {
+        if(oneVar.getVar().equalsIgnoreCase(varName)) {
+          return oneVar.getDefault();
+        }
+      }
+      return null;
+    }
+  }
+
+  // Map of current property names -> deprecated property names.
+  // The binding layer code should work if the deprecated property names are provided,
+  // as long as the new property names aren't also provided.  Since the binding code
+  // only calls the new property names, we require a map from current names to deprecated
+  // names in order to check if the deprecated name of a property was set.
+  private static final Map<String, AuthzConfVars> currentToDeprecatedProps =
+      new HashMap<String, AuthzConfVars>();
+  static {
+    currentToDeprecatedProps.put(AuthzConfVars.AUTHZ_PROVIDER.getVar(), AuthzConfVars.AUTHZ_PROVIDER_DEPRECATED);
+    currentToDeprecatedProps.put(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar(), AuthzConfVars.AUTHZ_PROVIDER_RESOURCE_DEPRECATED);
+    currentToDeprecatedProps.put(AuthzConfVars.AUTHZ_SERVER_NAME.getVar(), AuthzConfVars.AUTHZ_SERVER_NAME_DEPRECATED);
+    currentToDeprecatedProps.put(AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB_DEPRECATED);
+    currentToDeprecatedProps.put(AuthzConfVars.SENTRY_TESTING_MODE.getVar(), AuthzConfVars.SENTRY_TESTING_MODE_DEPRECATED);
+    currentToDeprecatedProps.put(AuthzConfVars.AUTHZ_ALLOW_HIVE_IMPERSONATION.getVar(), AuthzConfVars.AUTHZ_ALLOW_HIVE_IMPERSONATION_DEPRECATED);
+    currentToDeprecatedProps.put(AuthzConfVars.AUTHZ_ONFAILURE_HOOKS.getVar(), AuthzConfVars.AUTHZ_ONFAILURE_HOOKS_DEPRECATED);
+  };
+
+  private static final Logger LOG = LoggerFactory
+      .getLogger(HiveAuthzConf.class);
+  public static final String AUTHZ_SITE_FILE = "sentry-site.xml";
+  private final String hiveAuthzSiteFile;
+
+  public HiveAuthzConf(URL hiveAuthzSiteURL) {
+    super();
+    LOG.info("DefaultFS: " + super.get("fs.defaultFS"));
+    addResource(hiveAuthzSiteURL);
+    applySystemProperties();
+    LOG.info("DefaultFS: " + super.get("fs.defaultFS"));
+    this.hiveAuthzSiteFile = hiveAuthzSiteURL.toString();
+  }
+  /**
+   * Apply system properties to this object if the property name is defined in AuthzConfVars
+   * and the value is non-null and not an empty string.
+   */
+  private void applySystemProperties() {
+    Map<String, String> systemProperties = getConfSystemProperties();
+    for (Entry<String, String> systemProperty : systemProperties.entrySet()) {
+      this.set(systemProperty.getKey(), systemProperty.getValue());
+    }
+  }
+
+  /**
+   * This method returns a mapping from config variable name to its value for all config variables
+   * which have been set using System properties
+   */
+  public static Map<String, String> getConfSystemProperties() {
+    Map<String, String> systemProperties = new HashMap<String, String>();
+
+    for (AuthzConfVars oneVar : AuthzConfVars.values()) {
+      String value = System.getProperty(oneVar.getVar());
+      if (value != null && value.length() > 0) {
+        systemProperties.put(oneVar.getVar(), value);
+      }
+    }
+    return systemProperties;
+  }
+
+  @Override
+  public String get(String varName) {
+    return get(varName, null);
+  }
+
+  @Override
+  public String get(String varName, String defaultVal) {
+    String retVal = super.get(varName);
+    if (retVal == null) {
+      // check if the deprecated value is set here
+      if (currentToDeprecatedProps.containsKey(varName)) {
+        retVal = super.get(currentToDeprecatedProps.get(varName).getVar());
+      }
+      if (retVal == null) {
+        retVal = AuthzConfVars.getDefault(varName);
+      } else {
+        LOG.warn("Using the deprecated config setting " + currentToDeprecatedProps.get(varName).getVar() +
+            " instead of " + varName);
+      }
+    }
+    if (retVal == null) {
+      retVal = defaultVal;
+    }
+    return retVal;
+  }
+
+  public String getHiveAuthzSiteFile() {
+    return hiveAuthzSiteFile;
+  }
+
+  /**
+   * Extract the authz config file path from the given hive conf and load the authz config
+   * @param hiveConf hive configuration that carries the sentry-site.xml URL
+   * @return the loaded HiveAuthzConf
+   * @throws IllegalArgumentException if the config URL is missing or malformed
+   */
+  public static HiveAuthzConf getAuthzConf(HiveConf hiveConf)
+    throws IllegalArgumentException {
+    boolean deprecatedConfigFile = false;
+
+    String hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
+    if (hiveAuthzConf == null
+        || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
+      hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_ACCESS_CONF_URL);
+      deprecatedConfigFile = true;
+    }
+
+    if (hiveAuthzConf == null
+        || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
+      throw new IllegalArgumentException("Configuration key "
+          + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf
+          + "' is invalid.");
+    }
+
+    try {
+      return new HiveAuthzConf(new URL(hiveAuthzConf));
+    } catch (MalformedURLException e) {
+      if (deprecatedConfigFile) {
+        throw new IllegalArgumentException("Configuration key "
+            + HiveAuthzConf.HIVE_ACCESS_CONF_URL
+            + " specifies a malformed URL '" + hiveAuthzConf + "'", e);
+      } else {
+        throw new IllegalArgumentException("Configuration key "
+            + HiveAuthzConf.HIVE_SENTRY_CONF_URL
+            + " specifies a malformed URL '" + hiveAuthzConf + "'", e);
+      }
+    }
+  }
+}

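A short sketch of the fallback behavior implemented above; the URL is a
placeholder and imports are elided. getAuthzConf() resolves
hive.sentry.conf.url (falling back to the deprecated hive.access.conf.url),
and get() falls back from a current key to its deprecated name, then to the
AuthzConfVars default.

    HiveConf hiveConf = new HiveConf();
    hiveConf.set(HiveAuthzConf.HIVE_SENTRY_CONF_URL,
        "file:///etc/sentry/conf/sentry-site.xml");  // placeholder URL
    HiveAuthzConf authzConf = HiveAuthzConf.getAuthzConf(hiveConf);

    // Returns "sentry.hive.server" if set; otherwise the deprecated
    // "hive.sentry.server" (with a warning); otherwise the enum default "".
    String serverName = authzConf.get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar());
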
http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/conf/InvalidConfigurationException.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/conf/InvalidConfigurationException.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/conf/InvalidConfigurationException.java
new file mode 100644
index 0000000..b658922
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/conf/InvalidConfigurationException.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive.conf;
+
+public class InvalidConfigurationException extends Exception {
+  private static final long serialVersionUID = 1L;
+
+  // Parameterless constructor
+  public InvalidConfigurationException() {}
+
+  // Constructor that accepts a message
+  public InvalidConfigurationException(String message) {
+    super(message);
+  }
+}

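For completeness, a hedged sketch of raising the exception above, given a
HiveAuthzConf authzConf; the particular validation rule is illustrative only.

    // Hedged sketch: the validation rule shown here is illustrative.
    if (authzConf.get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar()).isEmpty()) {
      throw new InvalidConfigurationException(
          AuthzConfVars.AUTHZ_SERVER_NAME.getVar() + " must be set");
    }
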
http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStoreBase.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStoreBase.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStoreBase.java
new file mode 100644
index 0000000..196bd2b
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStoreBase.java
@@ -0,0 +1,412 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.binding.metastore;
+
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.List;
+import java.util.Set;
+
+import javax.security.auth.login.LoginException;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.ObjectStore;
+import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.UnknownDBException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.shims.Utils;
+import org.apache.sentry.binding.hive.HiveAuthzBindingHookBase;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
+
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+
+/***
+ * This class wraps ObjectStore, the interface between the application logic
+ * and the database store. It performs authorization and filters results when
+ * processing metastore requests.
+ * e.g.:
+ * Callers will only receive back the objects they have privileges to access.
+ * If there is a request for an object list (like getAllTables()), the result
+ * will be filtered to exclude objects the requestor doesn't have privilege to
+ * access.
+ */
+public class AuthorizingObjectStoreBase extends ObjectStore {
+  private static ImmutableSet<String> serviceUsers;
+  private static HiveConf hiveConf;
+  private static HiveAuthzConf authzConf;
+  private static HiveAuthzBinding hiveAuthzBinding;
+  private static final String NO_ACCESS_MESSAGE_TABLE = "Table does not exist or insufficient privileges to access: ";
+  private static final String NO_ACCESS_MESSAGE_DATABASE = "Database does not exist or insufficient privileges to access: ";
+
+  @Override
+  public List<String> getDatabases(String pattern) throws MetaException {
+    return filterDatabases(super.getDatabases(pattern));
+  }
+
+  @Override
+  public List<String> getAllDatabases() throws MetaException {
+    return filterDatabases(super.getAllDatabases());
+  }
+
+  @Override
+  public Database getDatabase(String name) throws NoSuchObjectException {
+    Database db = super.getDatabase(name);
+    try {
+      if (filterDatabases(Lists.newArrayList(name)).isEmpty()) {
+        throw new NoSuchObjectException(getNoAccessMessageForDB(name));
+      }
+    } catch (MetaException e) {
+      throw new NoSuchObjectException("Failed to authorized access to " + name
+          + " : " + e.getMessage());
+    }
+    return db;
+  }
+
+  @Override
+  public Table getTable(String dbName, String tableName) throws MetaException {
+    Table table = super.getTable(dbName, tableName);
+    if (table == null
+        || filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+      return null;
+    }
+    return table;
+  }
+
+  @Override
+  public Partition getPartition(String dbName, String tableName,
+      List<String> part_vals) throws MetaException, NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+      throw new NoSuchObjectException(getNoAccessMessageForTable(dbName, tableName));
+    }
+    return super.getPartition(dbName, tableName, part_vals);
+  }
+
+  @Override
+  public List<Partition> getPartitions(String dbName, String tableName,
+      int maxParts) throws MetaException, NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
+    }
+    return super.getPartitions(dbName, tableName, maxParts);
+  }
+
+  @Override
+  public List<String> getTables(String dbName, String pattern)
+      throws MetaException {
+    return filterTables(dbName, super.getTables(dbName, pattern));
+  }
+
+  @Override
+  public List<Table> getTableObjectsByName(String dbname, List<String> tableNames)
+      throws MetaException, UnknownDBException {
+    return super.getTableObjectsByName(dbname, filterTables(dbname, tableNames));
+  }
+
+  @Override
+  public List<String> getAllTables(String dbName) throws MetaException {
+    return filterTables(dbName, super.getAllTables(dbName));
+  }
+
+  @Override
+  public List<String> listTableNamesByFilter(String dbName, String filter,
+      short maxTables) throws MetaException {
+    return filterTables(dbName,
+        super.listTableNamesByFilter(dbName, filter, maxTables));
+  }
+
+  @Override
+  public List<String> listPartitionNames(String dbName, String tableName,
+      short max_parts) throws MetaException {
+    if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
+    }
+    return super.listPartitionNames(dbName, tableName, max_parts);
+  }
+
+  @Override
+  public List<String> listPartitionNamesByFilter(String dbName,
+      String tableName, String filter, short max_parts) throws MetaException {
+    if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
+    }
+    return super.listPartitionNamesByFilter(dbName, tableName, filter,
+        max_parts);
+  }
+
+  @Override
+  public Index getIndex(String dbName, String origTableName, String indexName)
+      throws MetaException {
+    if (filterTables(dbName, Lists.newArrayList(origTableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, origTableName));
+    }
+    return super.getIndex(dbName, origTableName, indexName);
+  }
+
+  @Override
+  public List<Index> getIndexes(String dbName, String origTableName, int max)
+      throws MetaException {
+    if (filterTables(dbName, Lists.newArrayList(origTableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, origTableName));
+    }
+    return super.getIndexes(dbName, origTableName, max);
+  }
+
+  @Override
+  public List<String> listIndexNames(String dbName, String origTableName,
+      short max) throws MetaException {
+    if (filterTables(dbName, Lists.newArrayList(origTableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, origTableName));
+    }
+    return super.listIndexNames(dbName, origTableName, max);
+  }
+
+  @Override
+  public List<Partition> getPartitionsByFilter(String dbName,
+      String tblName, String filter, short maxParts) throws MetaException,
+      NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.getPartitionsByFilter(dbName, tblName, filter, maxParts);
+  }
+
+  @Override
+  public List<Partition> getPartitionsByNames(String dbName, String tblName,
+      List<String> partNames) throws MetaException, NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.getPartitionsByNames(dbName, tblName, partNames);
+  }
+
+  @Override
+  public Partition getPartitionWithAuth(String dbName, String tblName,
+      List<String> partVals, String user_name, List<String> group_names)
+      throws MetaException, NoSuchObjectException, InvalidObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.getPartitionWithAuth(dbName, tblName, partVals, user_name,
+        group_names);
+  }
+
+  @Override
+  public List<Partition> getPartitionsWithAuth(String dbName, String tblName,
+      short maxParts, String userName, List<String> groupNames)
+      throws MetaException, NoSuchObjectException, InvalidObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.getPartitionsWithAuth(dbName, tblName, maxParts, userName,
+        groupNames);
+  }
+
+  @Override
+  public List<String> listPartitionNamesPs(String dbName, String tblName,
+      List<String> part_vals, short max_parts) throws MetaException,
+      NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.listPartitionNamesPs(dbName, tblName, part_vals, max_parts);
+  }
+
+  @Override
+  public List<Partition> listPartitionsPsWithAuth(String dbName,
+      String tblName, List<String> part_vals, short max_parts, String userName,
+      List<String> groupNames) throws MetaException, InvalidObjectException,
+      NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.listPartitionsPsWithAuth(dbName, tblName, part_vals,
+        max_parts, userName, groupNames);
+  }
+
+  @Override
+  public ColumnStatistics getTableColumnStatistics(String dbName,
+      String tableName, List<String> colNames) throws MetaException,
+      NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
+    }
+    return super.getTableColumnStatistics(dbName, tableName, colNames);
+  }
+
+  @Override
+  public List<ColumnStatistics> getPartitionColumnStatistics(
+      String dbName, String tblName, List<String> partNames,
+      List<String> colNames) throws MetaException, NoSuchObjectException {
+    if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+      throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+    }
+    return super.getPartitionColumnStatistics(dbName, tblName, partNames,
+        colNames);
+  }
+
+  /**
+   * Invoke Hive database filtering that removes the entries which the user
+   * has no privileges to access.
+   * @param dbList the database names to filter
+   * @return the databases the user may access
+   * @throws MetaException
+   */
+  private List<String> filterDatabases(List<String> dbList)
+      throws MetaException {
+    if (needsAuthorization(getUserName())) {
+      try {
+        return HiveAuthzBindingHookBase.filterShowDatabases(getHiveAuthzBinding(),
+            dbList, HiveOperation.SHOWDATABASES, getUserName());
+      } catch (SemanticException e) {
+        throw new MetaException("Error getting DB list " + e.getMessage());
+      }
+    } else {
+      return dbList;
+    }
+  }
+
+  /**
+   * Invoke Hive table filtering that removes the entries which the user has
+   * no privileges to access.
+   * @param dbName the database containing the tables
+   * @param tabList the table names to filter
+   * @return the tables the user may access
+   * @throws MetaException
+   */
+  protected List<String> filterTables(String dbName, List<String> tabList)
+      throws MetaException {
+    if (needsAuthorization(getUserName())) {
+      try {
+        return HiveAuthzBindingHookBase.filterShowTables(getHiveAuthzBinding(),
+            tabList, HiveOperation.SHOWTABLES, getUserName(), dbName);
+      } catch (SemanticException e) {
+        throw new MetaException("Error getting Table list " + e.getMessage());
+      }
+    } else {
+      return tabList;
+    }
+  }
+
+  /**
+   * Lazily load the Hive authorization binding.
+   *
+   * @return the cached HiveAuthzBinding instance
+   * @throws MetaException if the binding cannot be created
+   */
+  private HiveAuthzBinding getHiveAuthzBinding() throws MetaException {
+    if (hiveAuthzBinding == null) {
+      try {
+        hiveAuthzBinding = new HiveAuthzBinding(HiveAuthzBinding.HiveHook.HiveMetaStore,
+            getHiveConf(), getAuthzConf());
+      } catch (Exception e) {
+        throw new MetaException("Failed to load Hive binding " + e.getMessage());
+      }
+    }
+    return hiveAuthzBinding;
+  }
+
+  private ImmutableSet<String> getServiceUsers() throws MetaException {
+    if (serviceUsers == null) {
+      serviceUsers = ImmutableSet.copyOf(toTrimed(Sets.newHashSet(getAuthzConf().getStrings(
+          AuthzConfVars.AUTHZ_METASTORE_SERVICE_USERS.getVar(), new String[] { "" }))));
+    }
+    return serviceUsers;
+  }
+
+  private HiveConf getHiveConf() {
+    if (hiveConf == null) {
+      hiveConf = new HiveConf(getConf(), this.getClass());
+    }
+    return hiveConf;
+  }
+
+  private HiveAuthzConf getAuthzConf() throws MetaException {
+    if (authzConf == null) {
+      String hiveAuthzConf = getConf().get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
+      if (hiveAuthzConf == null
+          || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
+        throw new MetaException("Configuration key "
+            + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf
+            + "' is invalid.");
+      }
+      try {
+        authzConf = new HiveAuthzConf(new URL(hiveAuthzConf));
+      } catch (MalformedURLException e) {
+        throw new MetaException("Configuration key "
+            + HiveAuthzConf.HIVE_SENTRY_CONF_URL
+            + " specifies a malformed URL '" + hiveAuthzConf + "' "
+            + e.getMessage());
+      }
+    }
+    return authzConf;
+  }
+
+  /**
+   * Extract the user from the underlying auth subsystem.
+   * @return the short name of the current user
+   * @throws MetaException if the username cannot be determined
+   */
+  private String getUserName() throws MetaException {
+    try {
+      return Utils.getUGI().getShortUserName();
+    } catch (LoginException e) {
+      throw new MetaException("Failed to get username " + e.getMessage());
+    } catch (IOException e) {
+      throw new MetaException("Failed to get username " + e.getMessage());
+    }
+  }
+
+  /**
+   * Check if the given user needs to be authorized.
+   * @param userName the user to check
+   * @return true unless the user is a configured service user
+   */
+  private boolean needsAuthorization(String userName) throws MetaException {
+    return !getServiceUsers().contains(userName.trim());
+  }
+
+  private static Set<String> toTrimed(Set<String> s) {
+    Set<String> result = Sets.newHashSet();
+    for (String v : s) {
+      result.add(v.trim());
+    }
+    return result;
+  }
+
+  protected String getNoAccessMessageForTable(String dbName, String tableName) {
+    return NO_ACCESS_MESSAGE_TABLE + "<" + dbName + ">.<" + tableName + ">";
+  }
+
+  private String getNoAccessMessageForDB(String dbName) {
+    return NO_ACCESS_MESSAGE_DATABASE + "<" + dbName + ">";
+  }
+}
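
The read-path overrides above all share one guard: filter the single table
name through filterTables() and fail when nothing survives. A minimal sketch
of that guard factored into a helper, assuming only the filterTables() and
getNoAccessMessageForTable() signatures defined in the class above; the
helper is illustrative and not part of the patch:

    // Hypothetical helper; relies only on methods defined in the class above.
    private void ensureTableAccess(String dbName, String tableName)
        throws MetaException {
      // filterTables() returns only the tables the caller may see, so an
      // empty result means the caller has no privilege on this table.
      if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
        throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
      }
    }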


[3/5] sentry git commit: SENTRY-1138: Extract common classes for binding-hive-v1 and binding-hive-v2 (Dapeng Sun, reviewed by Colin Ma)

Posted by sd...@apache.org.
http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBindingBase.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBindingBase.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBindingBase.java
new file mode 100644
index 0000000..fb7d246
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBindingBase.java
@@ -0,0 +1,450 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.metastore;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+
+import javax.security.auth.login.LoginException;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.MetaStorePreEventListener;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.events.PreAddPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.PreAlterPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.PreAlterTableEvent;
+import org.apache.hadoop.hive.metastore.events.PreCreateTableEvent;
+import org.apache.hadoop.hive.metastore.events.PreDropDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.PreDropPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.PreDropTableEvent;
+import org.apache.hadoop.hive.metastore.events.PreEventContext;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.shims.Utils;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
+import org.apache.sentry.core.common.utils.PathUtils;
+import org.apache.sentry.core.model.db.AccessURI;
+import org.apache.sentry.core.model.db.DBModelAuthorizable;
+import org.apache.sentry.core.model.db.Database;
+import org.apache.sentry.core.model.db.Server;
+import org.apache.sentry.core.model.db.Table;
+
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Sets;
+
+/**
+ * Sentry binding for the Hive Metastore. The binding is integrated into the
+ * Metastore via pre-event listeners, which are fired prior to executing the
+ * metadata action. At this point only metadata writes are authorized, since
+ * the listeners are not fired for read events. Each action builds an input
+ * and an output hierarchy from the objects used in the given operation; these
+ * are then passed down to the Hive binding, which handles the authorization.
+ * This ensures that the same privilege model and policies are followed.
+ */
+public abstract class MetastoreAuthzBindingBase extends MetaStorePreEventListener {
+
+  /**
+   * Build the set of object hierarchies, i.e. fully qualified db model objects.
+   */
+  protected static class HierarcyBuilder {
+    private List<List<DBModelAuthorizable>> authHierarchy;
+
+    public HierarcyBuilder() {
+      authHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+    }
+
+    public HierarcyBuilder addServerToOutput(Server server) {
+      List<DBModelAuthorizable> serverHierarchy = new ArrayList<DBModelAuthorizable>();
+      serverHierarchy.add(server);
+      authHierarchy.add(serverHierarchy);
+      return this;
+    }
+
+    public HierarcyBuilder addDbToOutput(Server server, String dbName) {
+      List<DBModelAuthorizable> dbHierarchy = new ArrayList<DBModelAuthorizable>();
+      addServerToOutput(server);
+      dbHierarchy.add(server);
+      dbHierarchy.add(new Database(dbName));
+      authHierarchy.add(dbHierarchy);
+      return this;
+    }
+
+    public HierarcyBuilder addUriToOutput(Server server, String uriPath,
+        String warehouseDirPath) throws MetaException {
+      List<DBModelAuthorizable> uriHierarchy = new ArrayList<DBModelAuthorizable>();
+      addServerToOutput(server);
+      uriHierarchy.add(server);
+      try {
+        uriHierarchy.add(new AccessURI(PathUtils.parseDFSURI(warehouseDirPath,
+            uriPath)));
+      } catch (URISyntaxException e) {
+        throw new MetaException("Error parsing the URI " + e.getMessage());
+      }
+      authHierarchy.add(uriHierarchy);
+      return this;
+    }
+
+    public HierarcyBuilder addTableToOutput(Server server, String dbName,
+        String tableName) {
+      List<DBModelAuthorizable> tableHierarchy = new ArrayList<DBModelAuthorizable>();
+      addDbToOutput(server, dbName);
+      tableHierarchy.add(server);
+      tableHierarchy.add(new Database(dbName));
+      tableHierarchy.add(new Table(tableName));
+      authHierarchy.add(tableHierarchy);
+      return this;
+    }
+
+    public List<List<DBModelAuthorizable>> build() {
+      return authHierarchy;
+    }
+  }
+
+  private HiveAuthzConf authzConf;
+  private final Server authServer;
+  private final HiveConf hiveConf;
+  private final ImmutableSet<String> serviceUsers;
+  private HiveAuthzBinding hiveAuthzBinding;
+  private final String warehouseDir;
+  protected static boolean sentryCacheOutOfSync = false;
+
+  public MetastoreAuthzBindingBase(Configuration config) throws Exception {
+    super(config);
+    String hiveAuthzConf = config.get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
+    if (hiveAuthzConf == null
+        || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
+      throw new IllegalArgumentException("Configuration key "
+          + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf
+          + "' is invalid.");
+    }
+    try {
+      authzConf = new HiveAuthzConf(new URL(hiveAuthzConf));
+    } catch (MalformedURLException e) {
+      throw new IllegalArgumentException("Configuration key "
+          + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " specifies a malformed URL '"
+          + hiveAuthzConf + "'", e);
+    }
+    hiveConf = new HiveConf(config, this.getClass());
+    this.authServer = new Server(authzConf.get(AuthzConfVars.AUTHZ_SERVER_NAME
+        .getVar()));
+    serviceUsers = ImmutableSet.copyOf(toTrimedLower(Sets.newHashSet(authzConf
+        .getStrings(AuthzConfVars.AUTHZ_METASTORE_SERVICE_USERS.getVar(),
+            new String[] { "" }))));
+    warehouseDir = hiveConf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE);
+  }
+
+  /**
+   * Main listener callback which is the entry point for Sentry
+   */
+  @Override
+  public void onEvent(PreEventContext context) throws MetaException,
+      NoSuchObjectException, InvalidOperationException {
+
+    if (!needsAuthorization(getUserName())) {
+      return;
+    }
+    switch (context.getEventType()) {
+    case CREATE_TABLE:
+      authorizeCreateTable((PreCreateTableEvent) context);
+      break;
+    case DROP_TABLE:
+      authorizeDropTable((PreDropTableEvent) context);
+      break;
+    case ALTER_TABLE:
+      authorizeAlterTable((PreAlterTableEvent) context);
+      break;
+    case ADD_PARTITION:
+      authorizeAddPartition((PreAddPartitionEvent) context);
+      break;
+    case DROP_PARTITION:
+      authorizeDropPartition((PreDropPartitionEvent) context);
+      break;
+    case ALTER_PARTITION:
+      authorizeAlterPartition((PreAlterPartitionEvent) context);
+      break;
+    case CREATE_DATABASE:
+      authorizeCreateDatabase();
+      break;
+    case DROP_DATABASE:
+      authorizeDropDatabase((PreDropDatabaseEvent) context);
+      break;
+    case LOAD_PARTITION_DONE:
+      // noop for now
+      break;
+    default:
+      break;
+    }
+  }
+
+  private void authorizeCreateDatabase()
+      throws InvalidOperationException, MetaException {
+    authorizeMetastoreAccess(HiveOperation.CREATEDATABASE,
+        new HierarcyBuilder().addServerToOutput(getAuthServer()).build(),
+        new HierarcyBuilder().addServerToOutput(getAuthServer()).build());
+  }
+
+  private void authorizeDropDatabase(PreDropDatabaseEvent context)
+      throws InvalidOperationException, MetaException {
+    authorizeMetastoreAccess(HiveOperation.DROPDATABASE,
+        new HierarcyBuilder().addDbToOutput(getAuthServer(),
+            context.getDatabase().getName()).build(),
+        new HierarcyBuilder().addDbToOutput(getAuthServer(),
+            context.getDatabase().getName()).build());
+  }
+
+  private void authorizeCreateTable(PreCreateTableEvent context)
+      throws InvalidOperationException, MetaException {
+    HierarcyBuilder inputBuilder = new HierarcyBuilder();
+    inputBuilder.addDbToOutput(getAuthServer(), context.getTable().getDbName());
+    HierarcyBuilder outputBuilder = new HierarcyBuilder();
+    outputBuilder.addDbToOutput(getAuthServer(), context.getTable().getDbName());
+
+    if (!StringUtils.isEmpty(context.getTable().getSd().getLocation())) {
+      String uriPath;
+      try {
+        uriPath = PathUtils.parseDFSURI(warehouseDir,
+            getSdLocation(context.getTable().getSd()));
+      } catch(URISyntaxException e) {
+        throw new MetaException(e.getMessage());
+      }
+      inputBuilder.addUriToOutput(getAuthServer(), uriPath, warehouseDir);
+    }
+    authorizeMetastoreAccess(HiveOperation.CREATETABLE, inputBuilder.build(),
+        outputBuilder.build());
+  }
+
+  private void authorizeDropTable(PreDropTableEvent context)
+      throws InvalidOperationException, MetaException {
+    authorizeMetastoreAccess(
+        HiveOperation.DROPTABLE,
+        new HierarcyBuilder().addTableToOutput(getAuthServer(),
+            context.getTable().getDbName(), context.getTable().getTableName())
+            .build(),
+        new HierarcyBuilder().addTableToOutput(getAuthServer(),
+            context.getTable().getDbName(), context.getTable().getTableName())
+            .build());
+  }
+
+  private void authorizeAlterTable(PreAlterTableEvent context)
+      throws InvalidOperationException, MetaException {
+    /*
+     * There are multiple alter table options and it's tricky to figure out
+     * which is attempted here. Currently every alter table needs a full-level
+     * privilege, except for setting the location, which also needs a
+     * privilege on the URI. Hence we initially set the operation to
+     * ALTERTABLE_ADDCOLS; if the client has specified a new location, we
+     * change it to ALTERTABLE_LOCATION.
+     */
+    HiveOperation operation = HiveOperation.ALTERTABLE_ADDCOLS;
+    HierarcyBuilder inputBuilder = new HierarcyBuilder();
+    inputBuilder.addTableToOutput(getAuthServer(), context.getOldTable()
+        .getDbName(), context.getOldTable().getTableName());
+    HierarcyBuilder outputBuilder = new HierarcyBuilder();
+    outputBuilder.addTableToOutput(getAuthServer(), context.getOldTable()
+        .getDbName(), context.getOldTable().getTableName());
+
+    // if the operation requires location change, then add URI privilege check
+    String oldLocationUri;
+    String newLocationUri;
+    try {
+      oldLocationUri = PathUtils.parseDFSURI(warehouseDir,
+          getSdLocation(context.getOldTable().getSd()));
+      newLocationUri = PathUtils.parseDFSURI(warehouseDir,
+          getSdLocation(context.getNewTable().getSd()));
+    } catch (URISyntaxException e) {
+      throw new MetaException(e.getMessage());
+    }
+    if (!oldLocationUri.equals(newLocationUri)) {
+      outputBuilder.addUriToOutput(getAuthServer(), newLocationUri,
+          warehouseDir);
+      operation = HiveOperation.ALTERTABLE_LOCATION;
+    }
+    authorizeMetastoreAccess(operation, inputBuilder.build(),
+        outputBuilder.build());
+  }
+
+  private void authorizeAddPartition(PreAddPartitionEvent context)
+      throws InvalidOperationException, MetaException, NoSuchObjectException {
+    for (Partition mapiPart : context.getPartitions()) {
+      HierarcyBuilder inputBuilder = new HierarcyBuilder();
+      inputBuilder.addTableToOutput(getAuthServer(), mapiPart.getDbName(),
+          mapiPart.getTableName());
+      HierarcyBuilder outputBuilder = new HierarcyBuilder();
+      outputBuilder.addTableToOutput(getAuthServer(), mapiPart.getDbName(),
+          mapiPart.getTableName());
+      // check if we need to validate URI permissions when the storage
+      // location is non-default, i.e. something not under the parent table
+      String partitionLocation = null;
+      if (mapiPart.isSetSd()) {
+        partitionLocation = mapiPart.getSd().getLocation();
+      }
+      if (!StringUtils.isEmpty(partitionLocation)) {
+        String tableLocation = context.getHandler()
+            .get_table(mapiPart.getDbName(), mapiPart.getTableName())
+            .getSd().getLocation();
+        String uriPath;
+        try {
+          uriPath = PathUtils.parseDFSURI(warehouseDir,
+              mapiPart.getSd().getLocation());
+        } catch (URISyntaxException e) {
+          throw new MetaException(e.getMessage());
+        }
+        if (!partitionLocation.equals(tableLocation) &&
+            !partitionLocation.startsWith(tableLocation + File.separator)) {
+          outputBuilder.addUriToOutput(getAuthServer(), uriPath, warehouseDir);
+        }
+      }
+      authorizeMetastoreAccess(HiveOperation.ALTERTABLE_ADDPARTS,
+          inputBuilder.build(), outputBuilder.build());
+    }
+  }
+
+  protected void authorizeDropPartition(PreDropPartitionEvent context)
+      throws InvalidOperationException, MetaException {
+    authorizeMetastoreAccess(
+        HiveOperation.ALTERTABLE_DROPPARTS,
+        new HierarcyBuilder().addTableToOutput(getAuthServer(),
+            context.getPartition().getDbName(),
+            context.getPartition().getTableName()).build(),
+        new HierarcyBuilder().addTableToOutput(getAuthServer(),
+            context.getPartition().getDbName(),
+            context.getPartition().getTableName()).build());
+  }
+
+  private void authorizeAlterPartition(PreAlterPartitionEvent context)
+      throws InvalidOperationException, MetaException, NoSuchObjectException {
+    /*
+     * There are multiple alter partition options and it's tricky to figure
+     * out which is attempted here. Currently every alter partition needs a
+     * full-level privilege, except for setting the location, which also needs
+     * a privilege on the URI. We don't try to distinguish the operation type,
+     * so all alter partitions are treated as set-location.
+     */
+    HierarcyBuilder inputBuilder = new HierarcyBuilder().addTableToOutput(
+        getAuthServer(), context.getDbName(), context.getTableName());
+    HierarcyBuilder outputBuilder = new HierarcyBuilder().addTableToOutput(
+        getAuthServer(), context.getDbName(), context.getTableName());
+
+    Partition partition = context.getNewPartition();
+    String partitionLocation = getSdLocation(partition.getSd());
+    if (!StringUtils.isEmpty(partitionLocation)) {
+      String tableLocation = context.getHandler().get_table(
+          partition.getDbName(), partition.getTableName()).getSd().getLocation();
+
+      String uriPath;
+      try {
+        uriPath = PathUtils.parseDFSURI(warehouseDir, partitionLocation);
+      } catch (URISyntaxException e) {
+        throw new MetaException(e.getMessage());
+      }
+      if (!partitionLocation.startsWith(tableLocation + File.separator)) {
+        outputBuilder.addUriToOutput(getAuthServer(), uriPath, warehouseDir);
+      }
+    }
+    authorizeMetastoreAccess(
+        HiveOperation.ALTERPARTITION_LOCATION,
+        inputBuilder.build(), outputBuilder.build());
+  }
+
+  protected InvalidOperationException invalidOperationException(Exception e) {
+    InvalidOperationException ex = new InvalidOperationException(e.getMessage());
+    ex.initCause(e.getCause());
+    return ex;
+  }
+
+  /**
+   * Assemble the required and requested privileges, and validate them using
+   * the Hive binding's authorization provider.
+   * @param hiveOp the Hive operation being authorized
+   * @param inputHierarchy the authorizable objects read by the operation
+   * @param outputHierarchy the authorizable objects written by the operation
+   * @throws InvalidOperationException if authorization fails
+   */
+  protected abstract void authorizeMetastoreAccess(HiveOperation hiveOp,
+      List<List<DBModelAuthorizable>> inputHierarchy,
+      List<List<DBModelAuthorizable>> outputHierarchy)
+      throws InvalidOperationException;
+
+  public Server getAuthServer() {
+    return authServer;
+  }
+
+  private boolean needsAuthorization(String userName) {
+    return !serviceUsers.contains(userName);
+  }
+
+  private static Set<String> toTrimedLower(Set<String> s) {
+    Set<String> result = Sets.newHashSet();
+    for (String v : s) {
+      result.add(v.trim().toLowerCase());
+    }
+    return result;
+  }
+
+  protected HiveAuthzBinding getHiveAuthzBinding() throws Exception {
+    if (hiveAuthzBinding == null) {
+      hiveAuthzBinding = new HiveAuthzBinding(HiveAuthzBinding.HiveHook.HiveMetaStore, hiveConf, authzConf);
+    }
+    return hiveAuthzBinding;
+  }
+
+  protected String getUserName() throws MetaException {
+    try {
+      return Utils.getUGI().getShortUserName();
+    } catch (LoginException e) {
+      throw new MetaException("Failed to get username " + e.getMessage());
+    } catch (IOException e) {
+      throw new MetaException("Failed to get username " + e.getMessage());
+    }
+  }
+
+  private String getSdLocation(StorageDescriptor sd) {
+    if (sd == null) {
+      return "";
+    } else {
+      return sd.getLocation();
+    }
+  }
+
+  public static boolean isSentryCacheOutOfSync() {
+    return sentryCacheOutOfSync;
+  }
+
+  public static void setSentryCacheOutOfSync(boolean sentryCacheOutOfSync) {
+    MetastoreAuthzBindingBase.sentryCacheOutOfSync = sentryCacheOutOfSync;
+  }
+
+}
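
The hierarchies handed to authorizeMetastoreAccess() are plain nested lists
of DBModelAuthorizable objects. A minimal sketch of building one with the
HierarcyBuilder above; the server and object names are placeholder values:

    // Hypothetical values; Server and HierarcyBuilder are as defined above.
    Server server = new Server("server1");
    List<List<DBModelAuthorizable>> output = new HierarcyBuilder()
        .addTableToOutput(server, "sales_db", "orders")
        .build();
    // "output" now holds the server, database and table hierarchies that a
    // concrete authorizeMetastoreAccess() implementation validates.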

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/SentryHiveMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/SentryHiveMetaStoreClient.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/SentryHiveMetaStoreClient.java
new file mode 100644
index 0000000..b5df287
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/SentryHiveMetaStoreClient.java
@@ -0,0 +1,161 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.binding.metastore;
+
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.List;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaHookLoader;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.UnknownDBException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.sentry.binding.hive.HiveAuthzBindingHookBase;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.thrift.TException;
+
+public class SentryHiveMetaStoreClient extends HiveMetaStoreClient implements
+    IMetaStoreClient {
+
+  private HiveAuthzBinding hiveAuthzBinding;
+  private HiveAuthzConf authzConf;
+
+  public SentryHiveMetaStoreClient(HiveConf conf) throws MetaException {
+    super(conf);
+  }
+
+  public SentryHiveMetaStoreClient(HiveConf conf, HiveMetaHookLoader hookLoader)
+      throws MetaException {
+    super(conf, hookLoader);
+  }
+
+  @Override
+  public List<String> getDatabases(String databasePattern) throws MetaException {
+    return filterDatabases(super.getDatabases(databasePattern));
+  }
+
+  @Override
+  public List<String> getAllDatabases() throws MetaException {
+    return filterDatabases(super.getAllDatabases());
+  }
+
+  @Override
+  public List<String> getTables(String dbName, String tablePattern)
+      throws MetaException {
+    return filterTables(dbName, super.getTables(dbName, tablePattern));
+  }
+
+  @Override
+  public List<String> getAllTables(String dbName) throws MetaException {
+    return filterTables(dbName, super.getAllTables(dbName));
+  }
+
+  @Override
+  public List<String> listTableNamesByFilter(String dbName, String filter,
+      short maxTables) throws InvalidOperationException, UnknownDBException,
+      TException {
+    return filterTables(dbName,
+        super.listTableNamesByFilter(dbName, filter, maxTables));
+  }
+
+  /**
+   * Invoke Hive database filtering that removes the entries which the user
+   * has no privileges to access.
+   *
+   * @param dbList the database names to filter
+   * @return the databases the user may access
+   * @throws MetaException
+   */
+  private List<String> filterDatabases(List<String> dbList)
+      throws MetaException {
+    try {
+      return HiveAuthzBindingHookBase.filterShowDatabases(getHiveAuthzBinding(),
+          dbList, HiveOperation.SHOWDATABASES, getUserName());
+    } catch (SemanticException e) {
+      throw new MetaException("Error getting DB list " + e.getMessage());
+    }
+  }
+
+  /**
+   * Invoke Hive table filtering that removes the entries which the user has
+   * no privileges to access.
+   *
+   * @param dbName the database containing the tables
+   * @param tabList the table names to filter
+   * @return the tables the user may access
+   * @throws MetaException
+   */
+  private List<String> filterTables(String dbName, List<String> tabList)
+      throws MetaException {
+    try {
+      return HiveAuthzBindingHookBase.filterShowTables(getHiveAuthzBinding(),
+          tabList, HiveOperation.SHOWTABLES, getUserName(), dbName);
+    } catch (SemanticException e) {
+      throw new MetaException("Error getting Table list " + e.getMessage());
+    }
+  }
+
+  private String getUserName() {
+    return getConf().get(HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME);
+  }
+
+  /**
+   * Lazily load the Hive authorization binding.
+   *
+   * @return the cached HiveAuthzBinding instance
+   * @throws MetaException if the binding cannot be created
+   */
+  private HiveAuthzBinding getHiveAuthzBinding() throws MetaException {
+    if (hiveAuthzBinding == null) {
+      String hiveAuthzConf = getConf().get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
+      if (hiveAuthzConf == null
+          || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
+        throw new MetaException("Configuration key "
+            + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf
+            + "' is invalid.");
+      }
+      try {
+        authzConf = new HiveAuthzConf(new URL(hiveAuthzConf));
+      } catch (MalformedURLException e) {
+        throw new MetaException("Configuration key "
+            + HiveAuthzConf.HIVE_SENTRY_CONF_URL
+            + " specifies a malformed URL '" + hiveAuthzConf + "' "
+            + e.getMessage());
+      }
+      try {
+        hiveAuthzBinding = new HiveAuthzBinding(
+            HiveAuthzBinding.HiveHook.HiveMetaStore, getConf(), authzConf);
+      } catch (Exception e) {
+        throw new MetaException("Failed to load Hive binding " + e.getMessage());
+      }
+    }
+    return hiveAuthzBinding;
+  }
+
+  private HiveConf getConf() {
+    return SessionState.get().getConf();
+  }
+
+}
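
A minimal usage sketch for the client, assuming a populated HiveConf and an
active SessionState (the class reads its Sentry configuration from
SessionState.get().getConf(), as shown above); both calls can throw
MetaException:

    // Hypothetical usage; getDatabases() returns only the databases visible
    // to the subject named by HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME.
    HiveConf conf = new HiveConf();
    SentryHiveMetaStoreClient client = new SentryHiveMetaStoreClient(conf);
    List<String> visibleDbs = client.getDatabases("*");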

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java
new file mode 100644
index 0000000..2a0a5b8
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/SentryMetaStoreFilterHook.java
@@ -0,0 +1,201 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.metastore;
+
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.MetaStoreFilterHook;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.PartitionSpec;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.sentry.binding.hive.HiveAuthzBindingHookBase;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+
+public class SentryMetaStoreFilterHook implements MetaStoreFilterHook {
+
+  protected static final Log LOG = LogFactory.getLog(SentryMetaStoreFilterHook.class);
+
+  private HiveAuthzBinding hiveAuthzBinding;
+  private HiveAuthzConf authzConf;
+
+  public SentryMetaStoreFilterHook(HiveConf hiveConf) { //NOPMD
+  }
+
+  @Override
+  public List<String> filterDatabases(List<String> dbList) {
+    return filterDb(dbList);
+  }
+
+  @Override
+  public Database filterDatabase(Database dataBase)
+      throws NoSuchObjectException {
+    return dataBase;
+  }
+
+  @Override
+  public List<String> filterTableNames(String dbName, List<String> tableList) {
+    return filterTab(dbName, tableList);
+  }
+
+  @Override
+  public Table filterTable(Table table) throws NoSuchObjectException {
+    return table;
+  }
+
+  @Override
+  public List<Table> filterTables(List<Table> tableList) {
+    return tableList;
+  }
+
+  @Override
+  public List<Partition> filterPartitions(List<Partition> partitionList) {
+    return partitionList;
+  }
+
+  @Override
+  public List<PartitionSpec> filterPartitionSpecs(
+      List<PartitionSpec> partitionSpecList) {
+    return partitionSpecList;
+  }
+
+  @Override
+  public Partition filterPartition(Partition partition)
+      throws NoSuchObjectException {
+    return partition;
+  }
+
+  @Override
+  public List<String> filterPartitionNames(String dbName, String tblName,
+      List<String> partitionNames) {
+    return partitionNames;
+  }
+
+  @Override
+  public Index filterIndex(Index index) throws NoSuchObjectException {
+    return index;
+  }
+
+  @Override
+  public List<String> filterIndexNames(String dbName, String tblName,
+      List<String> indexList) {
+    return indexList;
+  }
+
+  @Override
+  public List<Index> filterIndexes(List<Index> indexList) {
+    return indexList;
+  }
+
+  /**
+   * Invoke Hive database filtering that removes the entries which the user
+   * has no privileges to access.
+   * @param dbList the database names to filter
+   * @return the databases the user may access
+   */
+  private List<String> filterDb(List<String> dbList) {
+    try {
+      return HiveAuthzBindingHookBase.filterShowDatabases(getHiveAuthzBinding(),
+          dbList, HiveOperation.SHOWDATABASES, getUserName());
+    } catch (Exception e) {
+      LOG.warn("Error getting DB list ", e);
+      return new ArrayList<String>();
+    } finally {
+      close();
+    }
+  }
+
+  /**
+   * Invoke Hive table filtering that removes the entries which the user has
+   * no privileges to access.
+   * @param dbName the database containing the tables
+   * @param tabList the table names to filter
+   * @return the tables the user may access
+   */
+  private List<String> filterTab(String dbName, List<String> tabList) {
+    try {
+      return HiveAuthzBindingHookBase.filterShowTables(getHiveAuthzBinding(),
+          tabList, HiveOperation.SHOWTABLES, getUserName(), dbName);
+    } catch (Exception e) {
+      LOG.warn("Error getting Table list ", e);
+      return new ArrayList<String>();
+    } finally {
+      close();
+    }
+  }
+
+  private String getUserName() {
+    return getConf().get(HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME);
+  }
+
+  /**
+   * Lazily load the Hive authorization binding.
+   * @return the cached HiveAuthzBinding instance
+   * @throws MetaException if the binding cannot be created
+   */
+  private HiveAuthzBinding getHiveAuthzBinding() throws MetaException {
+    if (hiveAuthzBinding == null) {
+      String hiveAuthzConf = getConf().get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
+      if (hiveAuthzConf == null
+          || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
+        throw new MetaException("Configuration key "
+            + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf
+            + "' is invalid.");
+      }
+      try {
+        authzConf = new HiveAuthzConf(new URL(hiveAuthzConf));
+      } catch (MalformedURLException e) {
+        throw new MetaException("Configuration key "
+            + HiveAuthzConf.HIVE_SENTRY_CONF_URL
+            + " specifies a malformed URL '" + hiveAuthzConf + "' "
+            + e.getMessage());
+      }
+      try {
+        hiveAuthzBinding = new HiveAuthzBinding(
+            HiveAuthzBinding.HiveHook.HiveMetaStore, getConf(), authzConf);
+      } catch (Exception e) {
+        throw new MetaException("Failed to load Hive binding " + e.getMessage());
+      }
+    }
+    return hiveAuthzBinding;
+  }
+
+  private HiveConf getConf() {
+    return SessionState.get().getConf();
+  }
+
+  private void close() {
+    if (hiveAuthzBinding != null) {
+      hiveAuthzBinding.close();
+      hiveAuthzBinding = null;
+    }
+  }
+}
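
The hook is picked up through Hive's metastore filter-hook setting. A sketch,
assuming the standard hive.metastore.filter.hook configuration key:

    // Set in hive-site.xml or programmatically before the metastore client
    // is created; the key is Hive's own, not introduced by this patch.
    HiveConf conf = new HiveConf();
    conf.set("hive.metastore.filter.hook",
        "org.apache.sentry.binding.metastore.SentryMetaStoreFilterHook");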

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListenerBase.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListenerBase.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListenerBase.java
new file mode 100644
index 0000000..d1197a4
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListenerBase.java
@@ -0,0 +1,404 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.metastore;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
+import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
+import org.apache.hadoop.hive.metastore.events.DropDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.DropPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.DropTableEvent;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.sentry.SentryUserException;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
+import org.apache.sentry.core.common.Authorizable;
+import org.apache.sentry.core.model.db.Database;
+import org.apache.sentry.core.model.db.Server;
+import org.apache.sentry.core.model.db.Table;
+import org.apache.sentry.provider.db.SentryMetastoreListenerPlugin;
+import org.apache.sentry.provider.db.service.thrift.SentryPolicyServiceClient;
+import org.apache.sentry.service.thrift.SentryServiceClientFactory;
+import org.apache.sentry.service.thrift.ServiceConstants.ConfUtilties;
+import org.apache.sentry.service.thrift.ServiceConstants.ServerConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class SentryMetastorePostEventListenerBase extends MetaStoreEventListener {
+
+  private static final Logger LOGGER = LoggerFactory
+      .getLogger(SentryMetastorePostEventListenerBase.class);
+  private final HiveAuthzConf authzConf;
+  private final Server server;
+
+  protected List<SentryMetastoreListenerPlugin> sentryPlugins = new ArrayList<SentryMetastoreListenerPlugin>();
+
+  public SentryMetastorePostEventListenerBase(Configuration config) {
+    super(config);
+
+    if (!(config instanceof HiveConf)) {
+      String error = "Could not initialize Plugin - Configuration is not an instance of HiveConf";
+      LOGGER.error(error);
+      throw new RuntimeException(error);
+    }
+
+    authzConf = HiveAuthzConf.getAuthzConf((HiveConf)config);
+    server = new Server(authzConf.get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar()));
+    Iterable<String> pluginClasses = ConfUtilties.CLASS_SPLITTER
+        .split(config.get(ServerConfig.SENTRY_METASTORE_PLUGINS,
+            ServerConfig.SENTRY_METASTORE_PLUGINS_DEFAULT).trim());
+
+    try {
+      for (String pluginClassStr : pluginClasses) {
+        Class<?> clazz = config.getClassByName(pluginClassStr);
+        if (!SentryMetastoreListenerPlugin.class.isAssignableFrom(clazz)) {
+          throw new IllegalArgumentException("Class ["
+              + pluginClassStr + "] is not a "
+              + SentryMetastoreListenerPlugin.class.getName());
+        }
+        SentryMetastoreListenerPlugin plugin = (SentryMetastoreListenerPlugin) clazz
+            .getConstructor(Configuration.class, Configuration.class)
+            .newInstance(config, authzConf);
+        sentryPlugins.add(plugin);
+      }
+    } catch (Exception e) {
+      LOGGER.error("Could not initialize Plugin !!", e);
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  public void onCreateTable (CreateTableEvent tableEvent) throws MetaException {
+
+    // don't sync paths/privileges if the operation has failed
+    if (!tableEvent.getStatus()) {
+      LOGGER.debug("Skip sync paths/privileges with Sentry server for onCreateTable event," +
+        " since the operation failed. \n");
+      return;
+    }
+
+    if (tableEvent.getTable().getSd().getLocation() != null) {
+      String authzObj = tableEvent.getTable().getDbName() + "."
+          + tableEvent.getTable().getTableName();
+      String path = tableEvent.getTable().getSd().getLocation();
+      for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
+        plugin.addPath(authzObj, path);
+      }
+    }
+
+    // drop the privileges on the given table, in case anything was left
+    // behind during a previous drop
+    if (!syncWithPolicyStore(AuthzConfVars.AUTHZ_SYNC_CREATE_WITH_POLICY_STORE)) {
+      return;
+    }
+
+    dropSentryTablePrivilege(tableEvent.getTable().getDbName(),
+        tableEvent.getTable().getTableName());
+  }
+
+  @Override
+  public void onDropTable(DropTableEvent tableEvent) throws MetaException {
+
+    // don't sync paths/privileges if the operation has failed
+    if (!tableEvent.getStatus()) {
+      LOGGER.debug("Skip syncing paths/privileges with Sentry server for onDropTable event," +
+        " since the operation failed. \n");
+      return;
+    }
+
+    if (tableEvent.getTable().getSd().getLocation() != null) {
+      String authzObj = tableEvent.getTable().getDbName() + "."
+          + tableEvent.getTable().getTableName();
+      for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
+        plugin.removeAllPaths(authzObj, null);
+      }
+    }
+    // drop the privileges on the given table
+    if (!syncWithPolicyStore(AuthzConfVars.AUTHZ_SYNC_DROP_WITH_POLICY_STORE)) {
+      return;
+    }
+
+    dropSentryTablePrivilege(tableEvent.getTable().getDbName(),
+        tableEvent.getTable().getTableName());
+  }
+
+  @Override
+  public void onCreateDatabase(CreateDatabaseEvent dbEvent)
+      throws MetaException {
+
+    // don't sync paths/privileges if the operation has failed
+    if (!dbEvent.getStatus()) {
+      LOGGER.debug("Skip syncing paths/privileges with Sentry server for onCreateDatabase event," +
+        " since the operation failed. \n");
+      return;
+    }
+
+    if (dbEvent.getDatabase().getLocationUri() != null) {
+      String authzObj = dbEvent.getDatabase().getName();
+      String path = dbEvent.getDatabase().getLocationUri();
+      for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
+        plugin.addPath(authzObj, path);
+      }
+    }
+    // drop the privileges on the database, in case anything was left behind
+    // during the last database drop
+    if (!syncWithPolicyStore(AuthzConfVars.AUTHZ_SYNC_CREATE_WITH_POLICY_STORE)) {
+      return;
+    }
+
+    dropSentryDbPrivileges(dbEvent.getDatabase().getName());
+  }
+
+  /**
+   * Drop the privileges on the database. Note that child tables will be
+   * dropped individually by the client, so we just need to handle removing
+   * the db privileges. The table drop should clean up the table privileges.
+   */
+  @Override
+  public void onDropDatabase(DropDatabaseEvent dbEvent) throws MetaException {
+
+    // don't sync paths/privileges if the operation has failed
+    if (!dbEvent.getStatus()) {
+      LOGGER.debug("Skip syncing paths/privileges with Sentry server for onDropDatabase event," +
+        " since the operation failed. \n");
+      return;
+    }
+
+    String authzObj = dbEvent.getDatabase().getName();
+    for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
+      List<String> tNames = dbEvent.getHandler().get_all_tables(authzObj);
+      plugin.removeAllPaths(authzObj, tNames);
+    }
+    if (!syncWithPolicyStore(AuthzConfVars.AUTHZ_SYNC_DROP_WITH_POLICY_STORE)) {
+      return;
+    }
+
+    dropSentryDbPrivileges(dbEvent.getDatabase().getName());
+  }
+
+  /**
+   * Adjust the privileges when table is renamed
+   */
+  @Override
+  public void onAlterTable (AlterTableEvent tableEvent) throws MetaException {
+
+    // don't sync privileges if the operation has failed
+    if (!tableEvent.getStatus()) {
+      LOGGER.debug("Skip syncing privileges with Sentry server for onAlterTable event," +
+        " since the operation failed. \n");
+      return;
+    }
+
+    renameSentryTablePrivilege(tableEvent.getOldTable().getDbName(),
+        tableEvent.getOldTable().getTableName(),
+        tableEvent.getOldTable().getSd().getLocation(),
+        tableEvent.getNewTable().getDbName(),
+        tableEvent.getNewTable().getTableName(),
+        tableEvent.getNewTable().getSd().getLocation());
+  }
+
+  @Override
+  public void onAlterPartition(AlterPartitionEvent partitionEvent)
+      throws MetaException {
+
+    // don't sync privileges if the operation has failed
+    if (!partitionEvent.getStatus()) {
+      LOGGER.debug("Skip syncing privileges with Sentry server for onAlterPartition event," +
+        " since the operation failed. \n");
+      return;
+    }
+
+    String oldLoc = null, newLoc = null;
+    if (partitionEvent.getOldPartition() != null) {
+      oldLoc = partitionEvent.getOldPartition().getSd().getLocation();
+    }
+    if (partitionEvent.getNewPartition() != null) {
+      newLoc = partitionEvent.getNewPartition().getSd().getLocation();
+    }
+
+    if (oldLoc != null && newLoc != null && !oldLoc.equals(newLoc)) {
+      String authzObj =
+          partitionEvent.getOldPartition().getDbName() + "."
+              + partitionEvent.getOldPartition().getTableName();
+      for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
+        plugin.renameAuthzObject(authzObj, oldLoc,
+            authzObj, newLoc);
+      }
+    }
+  }
+
+  @Override
+  public void onAddPartition(AddPartitionEvent partitionEvent)
+      throws MetaException {
+
+    // don't sync path if the operation has failed
+    if (!partitionEvent.getStatus()) {
+      LOGGER.debug("Skip syncing path with Sentry server for onAddPartition event," +
+        " since the operation failed. \n");
+      return;
+    }
+
+    for (Partition part : partitionEvent.getPartitions()) {
+      if (part.getSd() != null && part.getSd().getLocation() != null) {
+        String authzObj = part.getDbName() + "." + part.getTableName();
+        String path = part.getSd().getLocation();
+        for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
+          plugin.addPath(authzObj, path);
+        }
+      }
+    }
+    super.onAddPartition(partitionEvent);
+  }
+
+  @Override
+  public void onDropPartition(DropPartitionEvent partitionEvent)
+      throws MetaException {
+
+    // don't sync path if the operation has failed
+    if (!partitionEvent.getStatus()) {
+      LOGGER.debug("Skip syncing path with Sentry server for onDropPartition event," +
+        " since the operation failed. \n");
+      return;
+    }
+
+    String authzObj = partitionEvent.getTable().getDbName() + "."
+        + partitionEvent.getTable().getTableName();
+    String path = partitionEvent.getPartition().getSd().getLocation();
+    for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
+      plugin.removePath(authzObj, path);
+    }
+    super.onDropPartition(partitionEvent);
+  }
+
+  private SentryPolicyServiceClient getSentryServiceClient()
+      throws MetaException {
+    try {
+      return SentryServiceClientFactory.create(authzConf);
+    } catch (Exception e) {
+      throw new MetaException("Failed to connect to Sentry service "
+          + e.getMessage());
+    }
+  }
+
+  private void dropSentryDbPrivileges(String dbName) throws MetaException {
+    List<Authorizable> authorizableTable = new ArrayList<Authorizable>();
+    authorizableTable.add(server);
+    authorizableTable.add(new Database(dbName));
+    try {
+      dropSentryPrivileges(authorizableTable);
+    } catch (SentryUserException e) {
+      throw new MetaException("Failed to remove Sentry policies for drop DB "
+          + dbName + " Error: " + e.getMessage());
+    } catch (IOException e) {
+      throw new MetaException("Failed to find local user " + e.getMessage());
+    }
+  }
+
+  private void dropSentryTablePrivilege(String dbName, String tabName)
+      throws MetaException {
+    List<Authorizable> authorizableTable = new ArrayList<Authorizable>();
+    authorizableTable.add(server);
+    authorizableTable.add(new Database(dbName));
+    authorizableTable.add(new Table(tabName));
+
+    try {
+      dropSentryPrivileges(authorizableTable);
+    } catch (SentryUserException e) {
+      throw new MetaException(
+          "Failed to remove Sentry policies for drop table " + dbName + "."
+              + tabName + " Error: " + e.getMessage());
+    } catch (IOException e) {
+      throw new MetaException("Failed to find local user " + e.getMessage());
+    }
+  }
+
+  private void dropSentryPrivileges(
+      List<? extends Authorizable> authorizableTable)
+      throws SentryUserException, IOException, MetaException {
+    String requestorUserName = UserGroupInformation.getCurrentUser()
+        .getShortUserName();
+    SentryPolicyServiceClient sentryClient = getSentryServiceClient();
+    sentryClient.dropPrivileges(requestorUserName, authorizableTable);
+
+    // Close the connection after dropping privileges is done.
+    sentryClient.close();
+  }
+
+  private void renameSentryTablePrivilege(String oldDbName, String oldTabName,
+      String oldPath, String newDbName, String newTabName, String newPath)
+      throws MetaException {
+    List<Authorizable> oldAuthorizableTable = new ArrayList<Authorizable>();
+    oldAuthorizableTable.add(server);
+    oldAuthorizableTable.add(new Database(oldDbName));
+    oldAuthorizableTable.add(new Table(oldTabName));
+
+    List<Authorizable> newAuthorizableTable = new ArrayList<Authorizable>();
+    newAuthorizableTable.add(server);
+    newAuthorizableTable.add(new Database(newDbName));
+    newAuthorizableTable.add(new Table(newTabName));
+
+    if (!oldTabName.equalsIgnoreCase(newTabName)
+        && syncWithPolicyStore(AuthzConfVars.AUTHZ_SYNC_ALTER_WITH_POLICY_STORE)) {
+
+      SentryPolicyServiceClient sentryClient = getSentryServiceClient();
+
+      try {
+        String requestorUserName = UserGroupInformation.getCurrentUser()
+            .getShortUserName();
+        sentryClient.renamePrivileges(requestorUserName, oldAuthorizableTable, newAuthorizableTable);
+      } catch (SentryUserException e) {
+        throw new MetaException(
+            "Failed to remove Sentry policies for rename table " + oldDbName
+            + "." + oldTabName + "to " + newDbName + "." + newTabName
+            + " Error: " + e.getMessage());
+      } catch (IOException e) {
+        throw new MetaException("Failed to find local user " + e.getMessage());
+      } finally {
+        // Close the connection after renaming privileges is done.
+        sentryClient.close();
+      }
+    }
+    // The HDFS plugin needs to know if it's a path change (set location)
+    for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
+      plugin.renameAuthzObject(oldDbName + "." + oldTabName, oldPath,
+          newDbName + "." + newTabName, newPath);
+    }
+  }
+
+  private boolean syncWithPolicyStore(AuthzConfVars syncConfVar) {
+    return "true"
+        .equalsIgnoreCase(authzConf.get(syncConfVar.getVar(), "true"));
+  }
+
+}
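
For context on the hunk above: every drop/rename handler builds an Authorizable
hierarchy (server, then database, then table), resolves the requesting user, and
hands both to the Sentry client. Below is a minimal standalone sketch of that
shape, using only calls visible in this hunk; the client wiring is an assumption
here, since getSentryServiceClient() is defined elsewhere in this file, and the
import paths follow the Sentry package layout used elsewhere in this patch.

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.sentry.core.common.Authorizable;
    import org.apache.sentry.core.model.db.Database;
    import org.apache.sentry.core.model.db.Server;
    import org.apache.sentry.core.model.db.Table;
    import org.apache.sentry.provider.db.service.thrift.SentryPolicyServiceClient;

    public class DropTablePrivilegesSketch {
      // Drop every privilege granted on server -> db -> table, mirroring
      // dropSentryTablePrivilege() above.
      static void dropTablePrivileges(SentryPolicyServiceClient client,
          Server server, String dbName, String tabName) throws Exception {
        List<Authorizable> authorizable = new ArrayList<Authorizable>();
        authorizable.add(server);
        authorizable.add(new Database(dbName));
        authorizable.add(new Table(tabName));
        String requestor =
            UserGroupInformation.getCurrentUser().getShortUserName();
        client.dropPrivileges(requestor, authorizable);
        // One connection per operation, as in the binding above.
        client.close();
      }
    }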

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-v2/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/pom.xml b/sentry-binding/sentry-binding-hive-v2/pom.xml
index ef6048c..f33219d 100644
--- a/sentry-binding/sentry-binding-hive-v2/pom.xml
+++ b/sentry-binding/sentry-binding-hive-v2/pom.xml
@@ -31,7 +31,7 @@ limitations under the License.
   <dependencies>
     <dependency>
       <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-binding-hive</artifactId>
+      <artifactId>sentry-binding-hive-common</artifactId>
       <exclusions>
         <exclusion>
           <groupId>org.apache.httpcomponents</groupId>
@@ -114,21 +114,6 @@ limitations under the License.
       <groupId>org.apache.sentry</groupId>
       <artifactId>sentry-provider-common</artifactId>
     </dependency>
-    <!-- required for SentryGrantRevokeTask -->
-    <dependency>
-      <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-provider-db</artifactId>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.hive</groupId>
-          <artifactId>hive-beeline</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.hive</groupId>
-          <artifactId>hive-metastore</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
     <dependency>
       <groupId>org.apache.sentry</groupId>
       <artifactId>sentry-provider-file</artifactId>

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/pom.xml b/sentry-binding/sentry-binding-hive/pom.xml
index fb5f214..ff5882e 100644
--- a/sentry-binding/sentry-binding-hive/pom.xml
+++ b/sentry-binding/sentry-binding-hive/pom.xml
@@ -30,14 +30,6 @@ limitations under the License.
 
   <dependencies>
     <dependency>
-      <groupId>org.apache.thrift</groupId>
-      <artifactId>libthrift</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.derby</groupId>
-      <artifactId>derby</artifactId>
-    </dependency>
-    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <scope>test</scope>
@@ -54,32 +46,7 @@ limitations under the License.
     </dependency>
     <dependency>
       <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-core-common</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-core-model-db</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-provider-common</artifactId>
-    </dependency>
-    <!-- required for SentryGrantRevokeTask -->
-    <dependency>
-      <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-provider-db</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-provider-file</artifactId>
-    </dependency>
-      <dependency>
-        <groupId>org.apache.sentry</groupId>
-        <artifactId>sentry-provider-cache</artifactId>
-      </dependency>
-    <dependency>
-      <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-policy-db</artifactId>
+      <artifactId>sentry-binding-hive-common</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -97,6 +64,11 @@ limitations under the License.
       <artifactId>mockito-all</artifactId>
       <scope>test</scope>
     </dependency>
+    <!-- required for SentryGrantRevokeTask -->
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-provider-db</artifactId>
+    </dependency>
   </dependencies>
 
 </project>

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/SentryHiveConstants.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/SentryHiveConstants.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/SentryHiveConstants.java
deleted file mode 100644
index 5238414..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/SentryHiveConstants.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive;
-
-import java.util.EnumSet;
-
-import org.apache.hadoop.hive.ql.security.authorization.PrivilegeType;
-
-public class SentryHiveConstants {
-  public static final EnumSet<PrivilegeType> ALLOWED_PRIVS = EnumSet.allOf(PrivilegeType.class);
-
-  public static final String PRIVILEGE_NOT_SUPPORTED = "Sentry does not support privilege: ";
-  public static final String PARTITION_PRIVS_NOT_SUPPORTED = "Sentry does not support partition level authorization";
-  public static final String GRANT_REVOKE_NOT_SUPPORTED_ON_OBJECT = "Sentry does not allow grant/revoke on: ";
-  public static final String GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL = "Sentry does not allow privileges to be granted/revoked to/from: ";
-}
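
The deleted constants above move into the new sentry-binding-hive-common module.
For illustration, a hypothetical guard method showing how bindings typically
consume these constants; checkPrivilege is not from this patch, and note that
with EnumSet.allOf the check never fires, so it only becomes meaningful if the
allowed set is ever narrowed.

    import java.util.EnumSet;

    import org.apache.hadoop.hive.ql.security.authorization.PrivilegeType;

    public class PrivilegeGuardSketch {
      static final EnumSet<PrivilegeType> ALLOWED_PRIVS =
          EnumSet.allOf(PrivilegeType.class);
      static final String PRIVILEGE_NOT_SUPPORTED =
          "Sentry does not support privilege: ";

      // Hypothetical guard: reject privilege types outside the allowed set.
      static void checkPrivilege(PrivilegeType type) {
        if (!ALLOWED_PRIVS.contains(type)) {
          throw new UnsupportedOperationException(PRIVILEGE_NOT_SUPPORTED + type);
        }
      }
    }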

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java
deleted file mode 100644
index 8838368..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.ql.exec;
-
-import static org.apache.hadoop.util.StringUtils.stringifyException;
-
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.DriverContext;
-import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
-import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
-import org.apache.sentry.core.common.Subject;
-
-import com.google.common.base.Preconditions;
-
-public class SentryFilterDDLTask extends DDLTask {
-  private static final long serialVersionUID = 1L;
-  private static final Log LOG = LogFactory.getLog(SentryFilterDDLTask.class);
-
-  private HiveAuthzBinding hiveAuthzBinding;
-  private Subject subject;
-  private HiveOperation stmtOperation;
-
-  public SentryFilterDDLTask(HiveAuthzBinding hiveAuthzBinding, Subject subject,
-      HiveOperation stmtOperation) {
-    Preconditions.checkNotNull(hiveAuthzBinding);
-    Preconditions.checkNotNull(subject);
-    Preconditions.checkNotNull(stmtOperation);
-
-    this.hiveAuthzBinding = hiveAuthzBinding;
-    this.subject = subject;
-    this.stmtOperation = stmtOperation;
-  }
-
-  public HiveAuthzBinding getHiveAuthzBinding() {
-    return hiveAuthzBinding;
-  }
-
-  public Subject getSubject() {
-    return subject;
-  }
-
-  public HiveOperation getStmtOperation() {
-    return stmtOperation;
-  }
-
-  @Override
-  public int execute(DriverContext driverContext) {
-    // Currently the SentryFilterDDLTask only supports filtering the "show columns in table" command.
-    ShowColumnsDesc showCols = work.getShowColumnsDesc();
-    try {
-      if (showCols != null) {
-        return showFilterColumns(showCols);
-      }
-    } catch (Throwable e) {
-      failed(e);
-      return 1;
-    }
-
-    return super.execute(driverContext);
-  }
-
-  private void failed(Throwable e) {
-    while (e.getCause() != null && e.getClass() == RuntimeException.class) {
-      e = e.getCause();
-    }
-    setException(e);
-    LOG.error(stringifyException(e));
-  }
-
-  /**
-   * Filter the command "show columns in table"
-   *
-   */
-  private int showFilterColumns(ShowColumnsDesc showCols) throws HiveException {
-    Table table = Hive.get(conf).getTable(showCols.getTableName());
-
-    // write the results in the file
-    DataOutputStream outStream = null;
-    try {
-      Path resFile = new Path(showCols.getResFile());
-      FileSystem fs = resFile.getFileSystem(conf);
-      outStream = fs.create(resFile);
-
-      List<FieldSchema> cols = table.getCols();
-      cols.addAll(table.getPartCols());
-      // In case the query is served by HiveServer2, don't pad it with spaces,
-      // as HiveServer2 output is consumed by JDBC/ODBC clients.
-      boolean isOutputPadded = !SessionState.get().isHiveServerQuery();
-      outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(
-          filterColumns(cols, table), false, isOutputPadded, null));
-      outStream.close();
-      outStream = null;
-    } catch (IOException e) {
-      throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
-    } finally {
-      IOUtils.closeStream(outStream);
-    }
-    return 0;
-  }
-
-  private List<FieldSchema> filterColumns(List<FieldSchema> cols, Table table) throws HiveException {
-    // keep only the columns the subject has privileges on
-    return HiveAuthzBindingHook.filterShowColumns(getHiveAuthzBinding(),
-        cols, getStmtOperation(), getSubject().getName(), table.getTableName(), table.getDbName());
-  }
-}
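
The deleted task delegates the actual column filtering to a static helper whose
v1 definition is removed later in this patch (and presumably re-homed on
HiveAuthzBindingHookBase in the common module). A minimal sketch of calling that
helper directly, using the signature shown in this patch; the binding and user
are assumed to be supplied by the caller.

    import java.util.List;

    import org.apache.hadoop.hive.metastore.api.FieldSchema;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.ql.plan.HiveOperation;
    import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
    import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;

    public class ShowColumnsFilterSketch {
      // Keep only the columns the user holds column-level privileges on.
      static List<FieldSchema> visibleColumns(HiveAuthzBinding binding,
          List<FieldSchema> cols, String user, String table, String db)
          throws SemanticException {
        return HiveAuthzBindingHook.filterShowColumns(binding, cols,
            HiveOperation.SHOWCOLUMNS, user, table, db);
      }
    }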

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryGrantRevokeTask.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryGrantRevokeTask.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryGrantRevokeTask.java
index 31eb5e8..cd8352a 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryGrantRevokeTask.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryGrantRevokeTask.java
@@ -55,7 +55,7 @@ import org.apache.hadoop.hive.ql.security.authorization.PrivilegeType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.sentry.SentryUserException;
-import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
+import org.apache.sentry.binding.hive.HiveAuthzBindingHookBase;
 import org.apache.sentry.binding.hive.SentryOnFailureHookContext;
 import org.apache.sentry.binding.hive.SentryOnFailureHookContextImpl;
 import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
@@ -160,7 +160,7 @@ public class SentryGrantRevokeTask extends Task<DDLWork> implements Serializable
             new HashSet<WriteEntity>(), stmtOperation,
             null, null, null, null, subject.getName(), ipAddress,
             new AuthorizationException(e), conf);
-        HiveAuthzBindingHook.runFailureHook(hookContext, csHooks);
+        HiveAuthzBindingHookBase.runFailureHook(hookContext, csHooks);
         throw e; // rethrow the exception for logging
       }
     } catch(SentryUserException e) {

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java
deleted file mode 100644
index 4fa4221..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec;
-
-import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
-
-public class SentryHivePrivilegeObjectDesc extends PrivilegeObjectDesc {
-  private boolean isUri;
-  private boolean isServer;
-
-  public SentryHivePrivilegeObjectDesc() {
-    // reset table type which is on by default
-    super.setTable(false);
-  }
-
-  public boolean getUri() {
-    return isUri;
-  }
-
-  public void setUri(boolean isUri) {
-    this.isUri = isUri;
-  }
-
-  public boolean getServer() {
-    return isServer;
-  }
-
-  public void setServer(boolean isServer) {
-    this.isServer = isServer;
-  }
-
-  public boolean isSentryPrivObjectDesc() {
-    return isServer || isUri;
-  }
-
-}
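
SentryHivePrivilegeObjectDesc likewise moves out of this module (presumably into
the common binding module, keeping its package). The class itself is a small
flag holder; a usage sketch with illustrative values:

    import org.apache.hadoop.hive.ql.exec.SentryHivePrivilegeObjectDesc;

    public class PrivilegeObjectDescSketch {
      public static void main(String[] args) {
        SentryHivePrivilegeObjectDesc desc = new SentryHivePrivilegeObjectDesc();
        desc.setUri(true); // mark the object as a URI rather than a table
        // true when the object is Sentry-specific, i.e. a server or a URI
        System.out.println(desc.isSentryPrivObjectDesc());
      }
    }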


[2/5] sentry git commit: SENTRY-1138: Extract common classes for binding-hive-v1 and binding-hive-v2 (Dapeng Sun, reviewed by Colin Ma)

Posted by sd...@apache.org.
http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHook.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHook.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHook.java
index c425e06..d752b25 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHook.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHook.java
@@ -16,41 +16,22 @@
  */
 package org.apache.sentry.binding.hive;
 
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
-
 import java.io.Serializable;
 import java.net.MalformedURLException;
-import java.net.URI;
 import java.net.URL;
 import java.security.CodeSource;
-import java.util.ArrayList;
-import java.util.EnumSet;
 import java.util.List;
 import java.util.Set;
-import java.util.Arrays;
 
-import com.google.common.base.Preconditions;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.exec.DDLTask;
-import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.SentryFilterDDLTask;
 import org.apache.hadoop.hive.ql.exec.SentryGrantRevokeTask;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.hooks.Entity;
-import org.apache.hadoop.hive.ql.hooks.Entity.Type;
-import org.apache.hadoop.hive.ql.hooks.Hook;
-import org.apache.hadoop.hive.ql.hooks.ReadEntity;
-import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
@@ -58,52 +39,20 @@ import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.DDLWork;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
 import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges;
-import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationScope;
-import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationType;
 import org.apache.sentry.binding.hive.authz.HiveAuthzPrivilegesMap;
 import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
 import org.apache.sentry.core.common.Subject;
-import org.apache.sentry.core.common.utils.PathUtils;
-import org.apache.sentry.core.model.db.AccessURI;
-import org.apache.sentry.core.model.db.Column;
-import org.apache.sentry.core.model.db.DBModelAction;
-import org.apache.sentry.core.model.db.DBModelAuthorizable;
-import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType;
 import org.apache.sentry.core.model.db.Database;
 import org.apache.sentry.core.model.db.Table;
-import org.apache.sentry.provider.cache.PrivilegeCache;
-import org.apache.sentry.provider.cache.SimplePrivilegeCache;
-import org.apache.sentry.provider.common.AuthorizationProvider;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Splitter;
-import com.google.common.collect.ImmutableList;
+import com.google.common.base.Preconditions;
 
-public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook {
+public class HiveAuthzBindingHook extends HiveAuthzBindingHookBase {
   private static final Logger LOG = LoggerFactory
       .getLogger(HiveAuthzBindingHook.class);
-  private final HiveAuthzBinding hiveAuthzBinding;
-  private final HiveAuthzConf authzConf;
-  private Database currDB = Database.ALL;
-  private Table currTab;
-  private AccessURI udfURI;
-  private AccessURI serdeURI;
-  private AccessURI partitionURI;
-  private Table currOutTab = null;
-  private Database currOutDB = null;
-  private final List<String> serdeWhiteList;
-  private boolean serdeURIPrivilegesEnabled;
-
-  // True if this is a basic DESCRIBE <table> operation. False for other DESCRIBE variants
-  // like DESCRIBE [FORMATTED|EXTENDED]. Required because Hive treats these stmts as the same
-  // HiveOperationType, but we want to enforce different privileges on each statement.
-  // Basic DESCRIBE <table> is allowed with only column-level privs, while the variants
-  // require table-level privileges.
-  public boolean isDescTableBasic = false;
 
   public HiveAuthzBindingHook() throws Exception {
     SessionState session = SessionState.get();
@@ -117,16 +66,7 @@ public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook {
     if(hiveConf == null) {
       throw new IllegalStateException("Session HiveConf is null");
     }
-    authzConf = loadAuthzConf(hiveConf);
-    hiveAuthzBinding = new HiveAuthzBinding(hiveConf, authzConf);
-
-    String serdeWhiteLists = authzConf.get(HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST,
-        HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST_DEFAULT);
-    serdeWhiteList = Arrays.asList(serdeWhiteLists.split(","));
-    serdeURIPrivilegesEnabled = authzConf.getBoolean(HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED,
-        HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED_DEFAULT);
 
-    FunctionRegistry.setupPermissionsForBuiltinUDFs("", HiveAuthzConf.HIVE_UDF_BLACK_LIST);
   }
 
   public static HiveAuthzConf loadAuthzConf(HiveConf hiveConf) {
@@ -324,64 +264,6 @@ public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook {
     return ast;
   }
 
-  // Find the current database for session
-  private Database getCanonicalDb() {
-    return new Database(SessionState.get().getCurrentDatabase());
-  }
-
-  private Database extractDatabase(ASTNode ast) throws SemanticException {
-    String tableName = BaseSemanticAnalyzer.getUnescapedName(ast);
-    if (tableName.contains(".")) {
-      return new Database(tableName.split("\\.")[0]);
-    } else {
-      return getCanonicalDb();
-    }
-  }
-  private Table extractTable(ASTNode ast) throws SemanticException {
-    String tableName = BaseSemanticAnalyzer.getUnescapedName(ast);
-    if (tableName.contains(".")) {
-      return new Table(tableName.split("\\.")[1]);
-    } else {
-      return new Table(tableName);
-    }
-  }
-
-  @VisibleForTesting
-  protected static AccessURI extractPartition(ASTNode ast) throws SemanticException {
-    for (int i = 0; i < ast.getChildCount(); i++) {
-      ASTNode child = (ASTNode)ast.getChild(i);
-      if (child.getToken().getType() == HiveParser.TOK_PARTITIONLOCATION &&
-          child.getChildCount() == 1) {
-        return parseURI(BaseSemanticAnalyzer.
-          unescapeSQLString(child.getChild(0).getText()));
-      }
-    }
-    return null;
-  }
-
-  @VisibleForTesting
-  protected static AccessURI parseURI(String uri) throws SemanticException {
-    return parseURI(uri, false);
-  }
-
-  @VisibleForTesting
-  protected static AccessURI parseURI(String uri, boolean isLocal)
-      throws SemanticException {
-    try {
-      HiveConf conf = SessionState.get().getConf();
-      String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE);
-      Path warehousePath = new Path(warehouseDir);
-      if (warehousePath.isAbsoluteAndSchemeAuthorityNull()) {
-        FileSystem fs = FileSystem.get(conf);
-        warehouseDir = fs.makeQualified(warehousePath).toUri().toString();
-      }
-      return new AccessURI(PathUtils.parseURI(warehouseDir, uri, isLocal));
-    } catch (Exception e) {
-      throw new SemanticException("Error parsing URI " + uri + ": " +
-        e.getMessage(), e);
-    }
-  }
-
   /**
    * Post analyze hook that invokes hive auth bindings
    */
@@ -459,598 +341,4 @@ public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook {
     }
   }
 
-  private void executeOnFailureHooks(HiveSemanticAnalyzerHookContext context,
-      HiveOperation hiveOp, AuthorizationException e) {
-    SentryOnFailureHookContext hookCtx = new SentryOnFailureHookContextImpl(
-        context.getCommand(), context.getInputs(), context.getOutputs(),
-        hiveOp, currDB, currTab, udfURI, null, context.getUserName(),
-        context.getIpAddress(), e, context.getConf());
-    String csHooks = authzConf.get(
-        HiveAuthzConf.AuthzConfVars.AUTHZ_ONFAILURE_HOOKS.getVar(), "").trim();
-
-    try {
-      for (Hook aofh : getHooks(csHooks)) {
-        ((SentryOnFailureHook)aofh).run(hookCtx);
-      }
-    } catch (Exception ex) {
-      LOG.error("Error executing hook:", ex);
-    }
-  }
-
-  public static void runFailureHook(SentryOnFailureHookContext hookContext,
-      String csHooks) {
-    try {
-      for (Hook aofh : getHooks(csHooks)) {
-        ((SentryOnFailureHook) aofh).run(hookContext);
-      }
-    } catch (Exception ex) {
-      LOG.error("Error executing hook:", ex);
-    }
-  }
-  /**
-   * Convert the input/output entities into authorizables. Generate
-   * authorizables for cases like Database and metadata operations where the
-   * compiler doesn't capture entities. Invoke the hive binding to validate
-   * permissions.
-   *
-   * @param context
-   * @param stmtAuthObject
-   * @param stmtOperation
-   * @throws AuthorizationException
-   */
-  private void authorizeWithHiveBindings(HiveSemanticAnalyzerHookContext context,
-      HiveAuthzPrivileges stmtAuthObject, HiveOperation stmtOperation) throws  AuthorizationException {
-    Set<ReadEntity> inputs = context.getInputs();
-    Set<WriteEntity> outputs = context.getOutputs();
-    List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-    List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-
-    if(LOG.isDebugEnabled()) {
-      LOG.debug("stmtAuthObject.getOperationScope() = " + stmtAuthObject.getOperationScope());
-      LOG.debug("context.getInputs() = " + context.getInputs());
-      LOG.debug("context.getOutputs() = " + context.getOutputs());
-    }
-
-    // Workaround to allow DESCRIBE <table> to be executed with only column-level privileges, while
-    // still authorizing DESCRIBE [EXTENDED|FORMATTED] as table-level.
-    // This is done by treating DESCRIBE <table> the same as SHOW COLUMNS, which only requires column
-    // level privs.
-    if (isDescTableBasic) {
-      stmtAuthObject = HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(HiveOperation.SHOWCOLUMNS);
-    }
-
-    switch (stmtAuthObject.getOperationScope()) {
-
-    case SERVER :
-      // validate server level privileges if applicable, e.g. create UDF, register jar, etc.
-      List<DBModelAuthorizable> serverHierarchy = new ArrayList<DBModelAuthorizable>();
-      serverHierarchy.add(hiveAuthzBinding.getAuthServer());
-      inputHierarchy.add(serverHierarchy);
-      break;
-    case DATABASE:
-      // workaround for database scope statements (create/alter/drop db)
-      List<DBModelAuthorizable> dbHierarchy = new ArrayList<DBModelAuthorizable>();
-      dbHierarchy.add(hiveAuthzBinding.getAuthServer());
-      dbHierarchy.add(currDB);
-      inputHierarchy.add(dbHierarchy);
-      outputHierarchy.add(dbHierarchy);
-
-      getInputHierarchyFromInputs(inputHierarchy, inputs);
-
-      if (serdeURI != null) {
-        List<DBModelAuthorizable> serdeUriHierarchy = new ArrayList<DBModelAuthorizable>();
-        serdeUriHierarchy.add(hiveAuthzBinding.getAuthServer());
-        serdeUriHierarchy.add(serdeURI);
-        outputHierarchy.add(serdeUriHierarchy);
-      }
-      break;
-    case TABLE:
-      // workaround for add partitions
-      if(partitionURI != null) {
-        inputHierarchy.add(ImmutableList.of(hiveAuthzBinding.getAuthServer(), partitionURI));
-      }
-
-      getInputHierarchyFromInputs(inputHierarchy, inputs);
-      for (WriteEntity writeEntity: outputs) {
-        if (filterWriteEntity(writeEntity)) {
-          continue;
-        }
-        List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
-        entityHierarchy.add(hiveAuthzBinding.getAuthServer());
-        entityHierarchy.addAll(getAuthzHierarchyFromEntity(writeEntity));
-        outputHierarchy.add(entityHierarchy);
-      }
-      // workaround for metadata queries.
-      // Capture the table name in pre-analyze and include that in the input entity list
-      if (currTab != null) {
-        List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
-        externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
-        externalAuthorizableHierarchy.add(currDB);
-        externalAuthorizableHierarchy.add(currTab);
-        inputHierarchy.add(externalAuthorizableHierarchy);
-      }
-
-
-      // workaround for DDL statements
-      // Capture the table name in pre-analyze and include that in the output entity list
-      if (currOutTab != null) {
-        List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
-        externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
-        externalAuthorizableHierarchy.add(currOutDB);
-        externalAuthorizableHierarchy.add(currOutTab);
-        outputHierarchy.add(externalAuthorizableHierarchy);
-      }
-
-      if (serdeURI != null) {
-        List<DBModelAuthorizable> serdeUriHierarchy = new ArrayList<DBModelAuthorizable>();
-        serdeUriHierarchy.add(hiveAuthzBinding.getAuthServer());
-        serdeUriHierarchy.add(serdeURI);
-        outputHierarchy.add(serdeUriHierarchy);
-      }
-
-      break;
-    case FUNCTION:
-      /* The 'FUNCTION' privilege scope currently used for
-       *  - CREATE TEMP FUNCTION
-       *  - DROP TEMP FUNCTION.
-       */
-      if (udfURI != null) {
-        List<DBModelAuthorizable> udfUriHierarchy = new ArrayList<DBModelAuthorizable>();
-        udfUriHierarchy.add(hiveAuthzBinding.getAuthServer());
-        udfUriHierarchy.add(udfURI);
-        inputHierarchy.add(udfUriHierarchy);
-        for (WriteEntity writeEntity : outputs) {
-          List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
-          entityHierarchy.add(hiveAuthzBinding.getAuthServer());
-          entityHierarchy.addAll(getAuthzHierarchyFromEntity(writeEntity));
-          outputHierarchy.add(entityHierarchy);
-        }
-      }
-      break;
-    case CONNECT:
-      /* The 'CONNECT' is an implicit privilege scope currently used for
-       *  - USE <db>
-       *  It's allowed when the user has any privilege on the current database. For application
-       *  backward compatibility, we allow (optional) implicit connect permission on 'default' db.
-       */
-      List<DBModelAuthorizable> connectHierarchy = new ArrayList<DBModelAuthorizable>();
-      connectHierarchy.add(hiveAuthzBinding.getAuthServer());
-      // by default allow connect access to default db
-      Table currTbl = Table.ALL;
-      Column currCol = Column.ALL;
-      if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(currDB.getName()) &&
-          "false".equalsIgnoreCase(authzConf.
-              get(HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), "false"))) {
-        currDB = Database.ALL;
-        currTbl = Table.SOME;
-      }
-
-      connectHierarchy.add(currDB);
-      connectHierarchy.add(currTbl);
-      connectHierarchy.add(currCol);
-
-      inputHierarchy.add(connectHierarchy);
-      outputHierarchy.add(connectHierarchy);
-      break;
-    case COLUMN:
-      for (ReadEntity readEntity: inputs) {
-        if (readEntity.getAccessedColumns() != null && !readEntity.getAccessedColumns().isEmpty()) {
-          addColumnHierarchy(inputHierarchy, readEntity);
-        } else {
-          List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
-          entityHierarchy.add(hiveAuthzBinding.getAuthServer());
-          entityHierarchy.addAll(getAuthzHierarchyFromEntity(readEntity));
-          entityHierarchy.add(Column.ALL);
-          inputHierarchy.add(entityHierarchy);
-        }
-      }
-      break;
-    default:
-      throw new AuthorizationException("Unknown operation scope type " +
-          stmtAuthObject.getOperationScope().toString());
-    }
-
-    HiveAuthzBinding binding = null;
-    try {
-      binding = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, context.getUserName());
-    } catch (SemanticException e) {
-      // Will use the original hiveAuthzBinding
-      binding = hiveAuthzBinding;
-    }
-    // validate permission
-    binding.authorize(stmtOperation, stmtAuthObject, getCurrentSubject(context), inputHierarchy,
-        outputHierarchy);
-  }
-
-  private HiveOperation getCurrentHiveStmtOp() {
-    SessionState sessState = SessionState.get();
-    if (sessState == null) {
-      // TODO: Warn
-      return null;
-    }
-    return sessState.getHiveOperation();
-  }
-
-  private Subject getCurrentSubject(HiveSemanticAnalyzerHookContext context) {
-    // Extract the username from the hook context
-    return new Subject(context.getUserName());
-  }
-
-  // Build the hierarchy of authorizable object for the given entity type.
-  private List<DBModelAuthorizable> getAuthzHierarchyFromEntity(Entity entity) {
-    List<DBModelAuthorizable> objectHierarchy = new ArrayList<DBModelAuthorizable>();
-    switch (entity.getType()) {
-    case TABLE:
-      objectHierarchy.add(new Database(entity.getTable().getDbName()));
-      objectHierarchy.add(new Table(entity.getTable().getTableName()));
-      break;
-    case PARTITION:
-    case DUMMYPARTITION:
-      objectHierarchy.add(new Database(entity.getPartition().getTable().getDbName()));
-      objectHierarchy.add(new Table(entity.getPartition().getTable().getTableName()));
-      break;
-    case DFS_DIR:
-    case LOCAL_DIR:
-      try {
-        objectHierarchy.add(parseURI(entity.toString(),
-            entity.getType().equals(Entity.Type.LOCAL_DIR)));
-      } catch (Exception e) {
-        throw new AuthorizationException("Failed to get File URI", e);
-      }
-      break;
-    case DATABASE:
-    case FUNCTION:
-      // TODO use database entities from compiler instead of capturing from AST
-      break;
-    default:
-      throw new UnsupportedOperationException("Unsupported entity type " +
-          entity.getType().name());
-    }
-    return objectHierarchy;
-  }
-
-  /**
-   * Add column level hierarchy to inputHierarchy
-   *
-   * @param inputHierarchy
-   * @param entity
-   */
-  private void addColumnHierarchy(List<List<DBModelAuthorizable>> inputHierarchy,
-      ReadEntity entity) {
-    List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
-    entityHierarchy.add(hiveAuthzBinding.getAuthServer());
-    entityHierarchy.addAll(getAuthzHierarchyFromEntity(entity));
-
-    switch (entity.getType()) {
-    case TABLE:
-    case PARTITION:
-      List<String> cols = entity.getAccessedColumns();
-      for (String col : cols) {
-        List<DBModelAuthorizable> colHierarchy = new ArrayList<DBModelAuthorizable>(entityHierarchy);
-        colHierarchy.add(new Column(col));
-        inputHierarchy.add(colHierarchy);
-      }
-      break;
-    default:
-      inputHierarchy.add(entityHierarchy);
-    }
-  }
-
-  /**
-   * Get Authorizable from inputs and put into inputHierarchy
-   *
-   * @param inputHierarchy
-   * @param inputs
-   */
-  private void getInputHierarchyFromInputs(List<List<DBModelAuthorizable>> inputHierarchy,
-      Set<ReadEntity> inputs) {
-    for (ReadEntity readEntity: inputs) {
-      // skip the tables/view that are part of expanded view definition
-      // skip the Hive generated dummy entities created for queries like 'select <expr>'
-      if (isChildTabForView(readEntity) || isDummyEntity(readEntity)) {
-        continue;
-      }
-      if (readEntity.getAccessedColumns() != null && !readEntity.getAccessedColumns().isEmpty()) {
-        addColumnHierarchy(inputHierarchy, readEntity);
-      } else {
-        List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
-        entityHierarchy.add(hiveAuthzBinding.getAuthServer());
-        entityHierarchy.addAll(getAuthzHierarchyFromEntity(readEntity));
-        inputHierarchy.add(entityHierarchy);
-      }
-    }
-  }
-
-  // Check if this write entity needs to be skipped
-  private boolean filterWriteEntity(WriteEntity writeEntity)
-      throws AuthorizationException {
-    // skip URI validation for session scratch file URIs
-    if (writeEntity.isTempURI()) {
-      return true;
-    }
-    try {
-      if (writeEntity.getTyp().equals(Type.DFS_DIR)
-          || writeEntity.getTyp().equals(Type.LOCAL_DIR)) {
-        HiveConf conf = SessionState.get().getConf();
-        String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE);
-        URI scratchURI = new URI(PathUtils.parseDFSURI(warehouseDir,
-          conf.getVar(HiveConf.ConfVars.SCRATCHDIR)));
-        URI requestURI = new URI(PathUtils.parseDFSURI(warehouseDir,
-          writeEntity.getLocation().getPath()));
-        LOG.debug("scratchURI = " + scratchURI + ", requestURI = " + requestURI);
-        if (PathUtils.impliesURI(scratchURI, requestURI)) {
-          return true;
-        }
-        URI localScratchURI = new URI(PathUtils.parseLocalURI(conf.getVar(HiveConf.ConfVars.LOCALSCRATCHDIR)));
-        URI localRequestURI = new URI(PathUtils.parseLocalURI(writeEntity.getLocation().getPath()));
-        LOG.debug("localScratchURI = " + localScratchURI + ", localRequestURI = " + localRequestURI);
-        if (PathUtils.impliesURI(localScratchURI, localRequestURI)) {
-          return true;
-        }
-      }
-    } catch (Exception e) {
-      throw new AuthorizationException("Failed to extract uri details", e);
-    }
-    return false;
-  }
-
-  public static List<String> filterShowTables(
-      HiveAuthzBinding hiveAuthzBinding, List<String> queryResult,
-      HiveOperation operation, String userName, String dbName)
-          throws SemanticException {
-    List<String> filteredResult = new ArrayList<String>();
-    Subject subject = new Subject(userName);
-    HiveAuthzPrivileges tableMetaDataPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Column, EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)).
-        setOperationScope(HiveOperationScope.TABLE).
-        setOperationType(HiveOperationType.INFO).
-        build();
-
-    HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);
-
-    for (String tableName : queryResult) {
-      // if user has privileges on table, add to filtered list, else discard
-      Table table = new Table(tableName);
-      Database database;
-      database = new Database(dbName);
-
-      List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-      List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-      List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
-      externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
-      externalAuthorizableHierarchy.add(database);
-      externalAuthorizableHierarchy.add(table);
-      externalAuthorizableHierarchy.add(Column.ALL);
-      inputHierarchy.add(externalAuthorizableHierarchy);
-
-      try {
-        // do the authorization using the new HiveAuthzBinding with the PrivilegeCache
-        hiveBindingWithPrivilegeCache.authorize(operation, tableMetaDataPrivilege, subject,
-            inputHierarchy, outputHierarchy);
-        filteredResult.add(table.getName());
-      } catch (AuthorizationException e) {
-        // squash the exception: the user doesn't have privileges, so the
-        // table is not added to the filtered list.
-      }
-    }
-    return filteredResult;
-  }
-
-  public static List<FieldSchema> filterShowColumns(
-      HiveAuthzBinding hiveAuthzBinding, List<FieldSchema> cols,
-      HiveOperation operation, String userName, String tableName, String dbName)
-          throws SemanticException {
-    List<FieldSchema> filteredResult = new ArrayList<FieldSchema>();
-    Subject subject = new Subject(userName);
-    HiveAuthzPrivileges columnMetaDataPrivilege =
-        HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(HiveOperation.SHOWCOLUMNS);
-    HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);
-
-    Database database = new Database(dbName);
-    Table table = new Table(tableName);
-    for (FieldSchema col : cols) {
-      // if user has privileges on column, add to filtered list, else discard
-      List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-      List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-      List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
-      externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
-      externalAuthorizableHierarchy.add(database);
-      externalAuthorizableHierarchy.add(table);
-      externalAuthorizableHierarchy.add(new Column(col.getName()));
-      inputHierarchy.add(externalAuthorizableHierarchy);
-
-      try {
-        // do the authorization using the new HiveAuthzBinding with the PrivilegeCache
-        hiveBindingWithPrivilegeCache.authorize(operation, columnMetaDataPrivilege, subject,
-            inputHierarchy, outputHierarchy);
-        filteredResult.add(col);
-      } catch (AuthorizationException e) {
-        // squash the exception: the user doesn't have privileges, so the
-        // column is not added to the filtered list.
-      }
-    }
-    return filteredResult;
-  }
-
-  public static List<String> filterShowDatabases(
-      HiveAuthzBinding hiveAuthzBinding, List<String> queryResult,
-      HiveOperation operation, String userName) throws SemanticException {
-    List<String> filteredResult = new ArrayList<String>();
-    Subject subject = new Subject(userName);
-    HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);
-
-    HiveAuthzPrivileges anyPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
-        addInputObjectPriviledge(AuthorizableType.Column, EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)).
-        addInputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.SELECT)).
-        setOperationScope(HiveOperationScope.CONNECT).
-        setOperationType(HiveOperationType.QUERY).
-        build();
-
-    for (String dbName:queryResult) {
-      // if user has privileges on database, add to filtered list, else discard
-      Database database = null;
-
-      // if default is not restricted, continue
-      if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(dbName) && "false".equalsIgnoreCase(
-        hiveAuthzBinding.getAuthzConf().get(
-              HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(),
-              "false"))) {
-        filteredResult.add(DEFAULT_DATABASE_NAME);
-        continue;
-      }
-
-      database = new Database(dbName);
-
-      List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-      List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
-      List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
-      externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
-      externalAuthorizableHierarchy.add(database);
-      externalAuthorizableHierarchy.add(Table.ALL);
-      externalAuthorizableHierarchy.add(Column.ALL);
-      inputHierarchy.add(externalAuthorizableHierarchy);
-
-      try {
-        // do the authorization using the new HiveAuthzBinding with the PrivilegeCache
-        hiveBindingWithPrivilegeCache.authorize(operation, anyPrivilege, subject,
-            inputHierarchy, outputHierarchy);
-        filteredResult.add(database.getName());
-      } catch (AuthorizationException e) {
-        // squash the exception: the user doesn't have privileges, so the
-        // database is not added to the filtered list.
-      }
-    }
-
-    return filteredResult;
-  }
-
-  /**
-   * Check if the given read entity is a table that has parents of type Table.
-   * The Hive compiler performs a query rewrite by replacing a view with its definition. In the process, it captures both
-   * the original view and the tables/views that it selects from.
-   * The access authorization is only interested in the top-level views, not the underlying tables.
-   * @param readEntity
-   * @return
-   */
-  private boolean isChildTabForView(ReadEntity readEntity) {
-    // If this is a table added for view, then we need to skip that
-    if (!readEntity.getType().equals(Type.TABLE) && !readEntity.getType().equals(Type.PARTITION)) {
-      return false;
-    }
-    if (readEntity.getParents() != null && readEntity.getParents().size() > 0) {
-      for (ReadEntity parentEntity : readEntity.getParents()) {
-        if (!parentEntity.getType().equals(Type.TABLE)) {
-          return false;
-        }
-      }
-      return true;
-    } else {
-      return false;
-    }
-  }
-
-  /**
-   * Returns the hooks specified in a configuration variable.  The hooks are returned in a list in
-   * the order they were specified in the configuration variable.
-   *
-   * @param csHooks     A comma separated list of hook class names, taken from
-   *                    the configuration variable.
-   * @return            A list of the hooks, in the order they appear in csHooks
-   * @throws Exception
-   */
-  private static <T extends Hook> List<T> getHooks(String csHooks) throws Exception {
-
-    List<T> hooks = new ArrayList<T>();
-    if (csHooks.isEmpty()) {
-      return hooks;
-    }
-    for (String hookClass : Splitter.on(",").omitEmptyStrings().trimResults().split(csHooks)) {
-      try {
-        @SuppressWarnings("unchecked")
-        T hook =
-            (T) Class.forName(hookClass, true, JavaUtils.getClassLoader()).newInstance();
-        hooks.add(hook);
-      } catch (ClassNotFoundException e) {
-        LOG.error(hookClass + " class not found: " + e.getMessage());
-        throw e;
-      }
-    }
-
-    return hooks;
-  }
-
-  // Check if the given entity is identified as dummy by Hive compilers.
-  private boolean isDummyEntity(Entity entity) {
-    return entity.isDummy();
-  }
-
-  // create hiveBinding with PrivilegeCache
-  private static HiveAuthzBinding getHiveBindingWithPrivilegeCache(HiveAuthzBinding hiveAuthzBinding,
-      String userName) throws SemanticException {
-    // get the user's privileges from the original HiveAuthzBinding's AuthorizationProvider
-    AuthorizationProvider authProvider = hiveAuthzBinding.getCurrentAuthProvider();
-    Set<String> userPrivileges = authProvider.getPolicyEngine().getPrivileges(
-            authProvider.getGroupMapping().getGroups(userName), hiveAuthzBinding.getActiveRoleSet(),
-            hiveAuthzBinding.getAuthServer());
-
-    // create PrivilegeCache using user's privileges
-    PrivilegeCache privilegeCache = new SimplePrivilegeCache(userPrivileges);
-    try {
-      // create new instance of HiveAuthzBinding whose backend provider should be SimpleCacheProviderBackend
-      return new HiveAuthzBinding(HiveAuthzBinding.HiveHook.HiveServer2, hiveAuthzBinding.getHiveConf(),
-              hiveAuthzBinding.getAuthzConf(), privilegeCache);
-    } catch (Exception e) {
-      LOG.error("Cannot create HiveAuthzBinding with privilege cache.", e);
-      throw new SemanticException(e);
-    }
-  }
-
-  private static boolean hasPrefixMatch(List<String> prefixList, final String str) {
-    for (String prefix : prefixList) {
-      if (str.startsWith(prefix)) {
-        return true;
-      }
-    }
-
-    return false;
-  }
-
-  /**
-   * Set the Serde URI privileges. If the URI privileges are not set, serdeURI will be null and
-   * the URI authorization checks will be skipped.
-   */
-  private void setSerdeURI(String serdeClassName) throws SemanticException {
-    if (!serdeURIPrivilegesEnabled) {
-      return;
-    }
-
-    // WhiteList Serde jars can be used by any user. WhiteList checking is
-    // done by comparing the Java package name. The assumption is that the cluster
-    // admin will ensure there is no Java namespace collision.
-    // e.g. org.apache.hadoop.hive.serde2 is used by Hive, and the cluster admin should
-    // ensure no custom Serde class is introduced under the same namespace.
-    if (!hasPrefixMatch(serdeWhiteList, serdeClassName)) {
-      try {
-        CodeSource serdeSrc = Class.forName(serdeClassName, true, Utilities.getSessionSpecifiedClassLoader()).getProtectionDomain().getCodeSource();
-        if (serdeSrc == null) {
-          throw new SemanticException("Could not resolve the jar for Serde class " + serdeClassName);
-        }
-
-        String serdeJar = serdeSrc.getLocation().getPath();
-        if (serdeJar == null || serdeJar.isEmpty()) {
-          throw new SemanticException("Could not find the jar for Serde class " + serdeClassName + " to validate privileges");
-        }
-
-        serdeURI = parseURI(serdeSrc.getLocation().toString(), true);
-      } catch (ClassNotFoundException e) {
-        throw new SemanticException("Error retrieving Serde class: " + e.getMessage(), e);
-      }
-    }
-  }
 }
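
Among the members hoisted to HiveAuthzBindingHookBase by this patch is the
privilege-cache helper: it snapshots the user's privileges once, so the repeated
authorize() calls made while filtering SHOW output don't go back to the policy
backend each time. A standalone sketch mirroring the removed method body (all
calls are as shown in the hunk above):

    import java.util.Set;

    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
    import org.apache.sentry.provider.cache.PrivilegeCache;
    import org.apache.sentry.provider.cache.SimplePrivilegeCache;
    import org.apache.sentry.provider.common.AuthorizationProvider;

    public class PrivilegeCacheSketch {
      // Build a binding whose backend is a per-user, in-memory privilege cache.
      static HiveAuthzBinding cachedBinding(HiveAuthzBinding binding, String user)
          throws SemanticException {
        AuthorizationProvider provider = binding.getCurrentAuthProvider();
        Set<String> privs = provider.getPolicyEngine().getPrivileges(
            provider.getGroupMapping().getGroups(user),
            binding.getActiveRoleSet(), binding.getAuthServer());
        PrivilegeCache cache = new SimplePrivilegeCache(privs);
        try {
          return new HiveAuthzBinding(HiveAuthzBinding.HiveHook.HiveServer2,
              binding.getHiveConf(), binding.getAuthzConf(), cache);
        } catch (Exception e) {
          throw new SemanticException(e);
        }
      }
    }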

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java
index 17b9003..6d9150f 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java
@@ -104,7 +104,7 @@ public class HiveAuthzBindingSessionHook
 
     appendConfVar(sessionConf, ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
         SEMANTIC_HOOK);
-    HiveAuthzConf authzConf = HiveAuthzBindingHook.loadAuthzConf(sessionConf);
+    HiveAuthzConf authzConf = HiveAuthzBindingHookBase.loadAuthzConf(sessionConf);
     String commandWhitelist =
         authzConf.get(HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST,
             HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST_DEFAULT);

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java
deleted file mode 100644
index 45747df..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java
+++ /dev/null
@@ -1,161 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.binding.hive;
-
-import java.io.File;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.sentry.policy.common.PolicyConstants;
-import org.apache.sentry.provider.common.PolicyFileConstants;
-import org.apache.sentry.provider.common.ProviderBackendContext;
-import org.apache.sentry.provider.file.SimpleFileProviderBackend;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Charsets;
-import com.google.common.base.Joiner;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
-import com.google.common.collect.Table;
-import com.google.common.io.Files;
-
-/**
- * SentryIniPolicyFileFormatter parses and writes Sentry mapping data files in
- * ini format, e.g.:
- * [groups]
- * group1=role1
- * [roles]
- * role1=server=server1
- */
-public class SentryIniPolicyFileFormatter implements SentryPolicyFileFormatter {
-
-  private static final Logger LOGGER = LoggerFactory.getLogger(SentryIniPolicyFileFormatter.class);
-
-  private static final String NL = System.getProperty("line.separator", "\n");
-
-  /**
-   * Write the sentry mapping data to an ini file.
-   * 
-   * @param resourcePath
-   *        The path of the output file
-   * @param sentryMappingData
-   *        The map of sentry mapping data, e.g.
-   *        for the following mapping data:
-   *        group1=role1,role2
-   *        group2=role2,role3
-   *        role1=server=server1->db=db1
-   *        role2=server=server1->db=db1->table=tbl1,server=server1->db=db1->table=tbl2
-   *        role3=server=server1->url=hdfs://localhost/path
-   * 
-   *        The sentryMappingData will be passed in as:
-   *        {
-   *        groups={[group1={role1, role2}], group2=[role2, role3]},
-   *        roles={role1=[server=server1->db=db1],
-   *        role2=[server=server1->db=db1->table=tbl1,server=server1->db=db1->table=tbl2],
-   *        role3=[server=server1->url=hdfs://localhost/path]
-   *        }
-   *        }
-   */
-  @Override
-  public void write(String resourcePath, Map<String, Map<String, Set<String>>> sentryMappingData)
-      throws Exception {
-    File destFile = new File(resourcePath);
-    if (destFile.exists() && !destFile.delete()) {
-      throw new IllegalStateException("Unable to delete " + destFile);
-    }
-    String contents = Joiner
-        .on(NL)
-        .join(
-        generateSection(PolicyFileConstants.GROUPS,
-                sentryMappingData.get(PolicyFileConstants.GROUPS)),
-        generateSection(PolicyFileConstants.ROLES,
-                sentryMappingData.get(PolicyFileConstants.ROLES)),
-            "");
-    LOGGER.info("Writing policy file to " + destFile + ":\n" + contents);
-    Files.write(contents, destFile, Charsets.UTF_8);
-  }
-
-  /**
-   * Parse the ini file and return a map with all data.
-   *
-   * @param resourcePath
-   *        The path of the input file
-   * @param conf
-   *        The configuration info
-   * @return the Sentry mapping data as a map structure
-   */
-  @Override
-  public Map<String, Map<String, Set<String>>> parse(String resourcePath, Configuration conf)
-      throws Exception {
-    Map<String, Map<String, Set<String>>> resultMap = Maps.newHashMap();
-    // SimpleFileProviderBackend is used to parse the ini file
-    SimpleFileProviderBackend policyFileBackend = new SimpleFileProviderBackend(conf, resourcePath);
-    ProviderBackendContext context = new ProviderBackendContext();
-    context.setAllowPerDatabase(true);
-    // parse the ini file
-    policyFileBackend.initialize(context);
-
-    // SimpleFileProviderBackend parses the input file and outputs the data in Table format.
-    Table<String, String, Set<String>> groupRolePrivilegeTable = policyFileBackend
-        .getGroupRolePrivilegeTable();
-    Map<String, Set<String>> groupRolesMap = Maps.newHashMap();
-    Map<String, Set<String>> rolePrivilegesMap = Maps.newHashMap();
-    for (String groupName : groupRolePrivilegeTable.rowKeySet()) {
-      for (String roleName : groupRolePrivilegeTable.columnKeySet()) {
-        // get the roles set for the current groupName
-        Set<String> tempRoles = groupRolesMap.get(groupName);
-        if (tempRoles == null) {
-          tempRoles = Sets.newHashSet();
-        }
-        Set<String> privileges = groupRolePrivilegeTable.get(groupName, roleName);
-        // check whether a privilege exists for [group, role]; if none exists, the
-        // [group, role] info will be discarded.
-        if (privileges != null) {
-          // update [group, role] mapping data
-          tempRoles.add(roleName);
-          groupRolesMap.put(groupName, tempRoles);
-          // update [role, privilege] mapping data
-          rolePrivilegesMap.put(roleName, privileges);
-        }
-      }
-    }
-    resultMap.put(PolicyFileConstants.GROUPS, groupRolesMap);
-    resultMap.put(PolicyFileConstants.ROLES, rolePrivilegesMap);
-    return resultMap;
-  }
-
-  // generate the ini section according to the mapping data.
-  private String generateSection(String name, Map<String, Set<String>> mappingData) {
-    if (mappingData.isEmpty()) {
-      return "";
-    }
-    List<String> lines = Lists.newArrayList();
-    lines.add("[" + name + "]");
-    for (Map.Entry<String, Set<String>> entry : mappingData.entrySet()) {
-      lines.add(PolicyConstants.KV_JOINER.join(entry.getKey(),
-          PolicyConstants.ROLE_JOINER.join(entry.getValue())));
-    }
-    return Joiner.on(NL).join(lines);
-  }
-
-}
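
For illustration, a minimal round-trip sketch using the formatter above; the output path and the
bare Configuration are assumptions for the example, not Sentry defaults:

    import java.util.Map;
    import java.util.Set;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.sentry.binding.hive.SentryIniPolicyFileFormatter;
    import org.apache.sentry.provider.common.PolicyFileConstants;

    import com.google.common.collect.Maps;
    import com.google.common.collect.Sets;

    public class IniFormatterRoundTrip {
      public static void main(String[] args) throws Exception {
        // groups -> roles and roles -> privileges, mirroring the javadoc example
        Map<String, Set<String>> groups = Maps.newHashMap();
        groups.put("group1", Sets.newHashSet("role1"));
        Map<String, Set<String>> roles = Maps.newHashMap();
        roles.put("role1", Sets.newHashSet("server=server1->db=db1"));
        Map<String, Map<String, Set<String>>> mappingData = Maps.newHashMap();
        mappingData.put(PolicyFileConstants.GROUPS, groups);
        mappingData.put(PolicyFileConstants.ROLES, roles);

        SentryIniPolicyFileFormatter formatter = new SentryIniPolicyFileFormatter();
        formatter.write("/tmp/sentry-policy.ini", mappingData);   // assumed output path
        Map<String, Map<String, Set<String>>> parsed =
            formatter.parse("/tmp/sentry-policy.ini", new Configuration());
        System.out.println(parsed);                               // same groups/roles back
      }
    }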

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHook.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHook.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHook.java
deleted file mode 100644
index 45a2925..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHook.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.binding.hive;
-
-import org.apache.hadoop.hive.ql.hooks.Hook;
-
-/**
- *
- * SentryOnFailureHook allows Sentry to be extended
- * with custom logic to be executed upon authorization failure.
- *
- */
-public interface SentryOnFailureHook extends Hook {
-
-  /**
-   *
-   * @param context
-   *     The hook context passed to each hook.
-   * @throws Exception
-   */
-  void run(SentryOnFailureHookContext context) throws Exception;
-}
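
As a hedged sketch of extending this interface (the class name and behavior are illustrative, not
part of Sentry), a hook that logs each authorization failure might look like:

    import org.apache.sentry.binding.hive.SentryOnFailureHook;
    import org.apache.sentry.binding.hive.SentryOnFailureHookContext;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Illustrative hook: records the failed command and the user who issued it.
    public class LoggingOnFailureHook implements SentryOnFailureHook {

      private static final Logger LOG = LoggerFactory.getLogger(LoggingOnFailureHook.class);

      @Override
      public void run(SentryOnFailureHookContext context) throws Exception {
        LOG.warn("Authorization failed for user " + context.getUserName()
            + " from " + context.getIpAddress()
            + " running: " + context.getCommand(), context.getException());
      }
    }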

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContext.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContext.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContext.java
deleted file mode 100644
index c101a4f..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContext.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.binding.hive;
-
-import java.util.Set;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.hooks.ReadEntity;
-import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.sentry.core.model.db.AccessURI;
-import org.apache.sentry.core.model.db.Database;
-import org.apache.sentry.core.model.db.Table;
-
-/**
- * Context information provided by Sentry to implementations
- * of SentryOnFailureHook.
- */
-public interface SentryOnFailureHookContext {
-
-  /**
-   * @return the command attempted by user
-   */
-  String getCommand();
-
-  /**
-   * @return the set of read entities
-   */
-  Set<ReadEntity> getInputs();
-
-  /**
-   * @return the set of write entities
-   */
-  Set<WriteEntity> getOutputs();
-
-  /**
-   * @return the operation
-   */
-  HiveOperation getHiveOp();
-
-  /**
-   * @return the user name
-   */
-  String getUserName();
-
-  /**
-   * @return the ip address
-   */
-  String getIpAddress();
-
-  /**
-   * @return the database object
-   */
-  Database getDatabase();
-
-  /**
-   * @return the table object
-   */
-  Table getTable();
-
-  /**
-   * @return the udf URI
-   */
-  AccessURI getUdfURI();
-
-  /**
-   * @return the partition URI
-   */
-  AccessURI getPartitionURI();
-
-  /**
-   * @return the authorization failure exception
-   */
-  AuthorizationException getException();
-
-  /**
-   * @return the config
-   */
-  Configuration getConf();
-
-}
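
To make the wiring concrete, a sketch of loading such hooks reflectively from a comma-separated
class-name property; the property key sentry.hive.failure.hooks is an assumption here and should
be verified against HiveAuthzConf.AuthzConfVars:

    import java.util.List;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.sentry.binding.hive.SentryOnFailureHook;

    import com.google.common.base.Splitter;
    import com.google.common.collect.Lists;

    public class FailureHookLoader {
      // Assumed property key; verify against HiveAuthzConf.AuthzConfVars.
      private static final String HOOKS_KEY = "sentry.hive.failure.hooks";

      public static List<SentryOnFailureHook> loadHooks(Configuration conf) throws Exception {
        List<SentryOnFailureHook> hooks = Lists.newArrayList();
        for (String name : Splitter.on(",").trimResults().omitEmptyStrings()
            .split(conf.get(HOOKS_KEY, ""))) {
          // instantiate each configured hook via its no-arg constructor
          hooks.add((SentryOnFailureHook) Class.forName(name).newInstance());
        }
        return hooks;
      }
    }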

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContextImpl.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContextImpl.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContextImpl.java
deleted file mode 100644
index f97d7f3..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContextImpl.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.binding.hive;
-
-import java.util.Set;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.hooks.ReadEntity;
-import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.sentry.core.model.db.AccessURI;
-import org.apache.sentry.core.model.db.Database;
-import org.apache.sentry.core.model.db.Table;
-
-public class SentryOnFailureHookContextImpl implements SentryOnFailureHookContext {
-
-  private final String command;
-  private final Set<ReadEntity> inputs;
-  private final Set<WriteEntity> outputs;
-  private final HiveOperation hiveOp;
-  private final String userName;
-  private final String ipAddress;
-  private final Database database;
-  private final Table table;
-  private final AccessURI udfURI;
-  private final AccessURI partitionURI;
-  private final AuthorizationException authException;
-  private final Configuration conf;
-
-  public SentryOnFailureHookContextImpl(String command,
-      Set<ReadEntity> inputs, Set<WriteEntity> outputs, HiveOperation hiveOp,
-      Database db, Table tab, AccessURI udfURI, AccessURI partitionURI,
-      String userName, String ipAddress, AuthorizationException e,
-      Configuration conf) {
-    this.command = command;
-    this.inputs = inputs;
-    this.outputs = outputs;
-    this.hiveOp = hiveOp;
-    this.userName = userName;
-    this.ipAddress = ipAddress;
-    this.database = db;
-    this.table = tab;
-    this.udfURI = udfURI;
-    this.partitionURI = partitionURI;
-    this.authException = e;
-    this.conf = conf;
-  }
-
-  @Override
-  public String getCommand() {
-    return command;
-  }
-
-  @Override
-  public Set<ReadEntity> getInputs() {
-    return inputs;
-  }
-
-  @Override
-  public Set<WriteEntity> getOutputs() {
-    return outputs;
-  }
-
-  @Override
-  public HiveOperation getHiveOp() {
-    return hiveOp;
-  }
-
-  @Override
-  public String getUserName() {
-    return userName;
-  }
-
-  @Override
-  public String getIpAddress() {
-    return ipAddress;
-  }
-
-  @Override
-  public Database getDatabase() {
-    return database;
-  }
-
-  @Override
-  public Table getTable() {
-    return table;
-  }
-
-  @Override
-  public AccessURI getUdfURI() {
-    return udfURI;
-  }
-
-  @Override
-  public AccessURI getPartitionURI() {
-    return partitionURI;
-  }
-
-  @Override
-  public AuthorizationException getException() {
-    return authException;
-  }
-
-  @Override
-  public Configuration getConf() {
-    return conf;
-  }
-}
\ No newline at end of file
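
A sketch of how a binding might populate this context on an authorization failure; every literal
below is a placeholder, and inputs, outputs, e, conf and hooks are assumed to be in scope:

    SentryOnFailureHookContext ctx = new SentryOnFailureHookContextImpl(
        "SELECT * FROM db1.tbl1",            // command text (placeholder)
        inputs, outputs, HiveOperation.QUERY,
        new Database("db1"), new Table("tbl1"),
        null, null,                          // no UDF or partition URI involved
        "bob", "10.0.0.5",                   // user name and client ip (placeholders)
        e, conf);
    for (SentryOnFailureHook hook : hooks) {
      hook.run(ctx);                         // notify each registered hook
    }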

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatFactory.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatFactory.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatFactory.java
deleted file mode 100644
index d2c6072..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatFactory.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.binding.hive;
-
-import java.lang.reflect.Constructor;
-
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
-
-/**
- * SentryPolicyFileFormatFactory creates a SentryPolicyFileFormatter for the file type named in
- * the configuration; the default formatter handles ini files.
- */
-public class SentryPolicyFileFormatFactory {
-
-  public static SentryPolicyFileFormatter createFileFormatter(HiveAuthzConf conf) throws Exception {
-    // The default formatter is org.apache.sentry.binding.hive.SentryIniPolicyFileFormatter, for ini
-    // file.
-    String policyFileFormatterName = conf.get(AuthzConfVars.AUTHZ_POLICY_FILE_FORMATTER.getVar());
-    // load the policy file formatter class
-    Constructor<?> policyFileFormatterConstructor = Class.forName(policyFileFormatterName)
-        .getDeclaredConstructor();
-    policyFileFormatterConstructor.setAccessible(true);
-    SentryPolicyFileFormatter sentryPolicyFileFormatter = (SentryPolicyFileFormatter) policyFileFormatterConstructor
-        .newInstance();
-    return sentryPolicyFileFormatter;
-  }
-}
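
A brief usage sketch; the sentry-site.xml location and output path are placeholders, and
mappingData is assumed to have been built elsewhere (for example by a service client export):

    HiveAuthzConf authzConf = new HiveAuthzConf(
        new java.net.URL("file:///etc/sentry/conf/sentry-site.xml"));  // assumed location
    SentryPolicyFileFormatter formatter =
        SentryPolicyFileFormatFactory.createFileFormatter(authzConf);
    formatter.write("/tmp/exported-policy.ini", mappingData);          // placeholder path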

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatter.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatter.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatter.java
deleted file mode 100644
index 4f465b3..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatter.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.binding.hive;
-
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * SentryPolicyFileFormatter parses Sentry mapping data from a file and writes it back to a file.
- */
-public interface SentryPolicyFileFormatter {
-
-  // write the sentry mapping data to file
-  void write(String resourcePath, Map<String, Map<String, Set<String>>> sentryMappingData)
-      throws Exception;
-
-  // parse the sentry mapping data from file
-  Map<String, Map<String, Set<String>>> parse(String resourcePath, Configuration conf)
-      throws Exception;
-
-}

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzBinding.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzBinding.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzBinding.java
deleted file mode 100644
index 0a1d0e8..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzBinding.java
+++ /dev/null
@@ -1,407 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.binding.hive.authz;
-
-import java.lang.reflect.Constructor;
-import java.util.EnumSet;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
-import org.apache.sentry.SentryUserException;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
-import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
-import org.apache.sentry.binding.hive.conf.InvalidConfigurationException;
-import org.apache.sentry.core.common.ActiveRoleSet;
-import org.apache.sentry.core.common.Subject;
-import org.apache.sentry.core.model.db.AccessConstants;
-import org.apache.sentry.core.model.db.DBModelAction;
-import org.apache.sentry.core.model.db.DBModelAuthorizable;
-import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType;
-import org.apache.sentry.core.model.db.Server;
-import org.apache.sentry.policy.common.PolicyEngine;
-import org.apache.sentry.provider.cache.PrivilegeCache;
-import org.apache.sentry.provider.cache.SimpleCacheProviderBackend;
-import org.apache.sentry.provider.common.AuthorizationProvider;
-import org.apache.sentry.provider.common.ProviderBackend;
-import org.apache.sentry.provider.common.ProviderBackendContext;
-import org.apache.sentry.provider.db.service.thrift.TSentryRole;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Splitter;
-import com.google.common.base.Strings;
-import com.google.common.collect.Sets;
-
-public class HiveAuthzBinding {
-  private static final Logger LOG = LoggerFactory
-      .getLogger(HiveAuthzBinding.class);
-  private static final Splitter ROLE_SET_SPLITTER = Splitter.on(",").trimResults()
-      .omitEmptyStrings();
-  public static final String HIVE_BINDING_TAG = "hive.authz.bindings.tag";
-
-  private final HiveConf hiveConf;
-  private final Server authServer;
-  private final AuthorizationProvider authProvider;
-  private volatile boolean open;
-  private ActiveRoleSet activeRoleSet;
-  private HiveAuthzConf authzConf;
-
-  public static enum HiveHook {
-    HiveServer2,
-    HiveMetaStore;
-  }
-
-  public HiveAuthzBinding (HiveConf hiveConf, HiveAuthzConf authzConf) throws Exception {
-    this(HiveHook.HiveServer2, hiveConf, authzConf);
-  }
-
-  public HiveAuthzBinding (HiveHook hiveHook, HiveConf hiveConf, HiveAuthzConf authzConf) throws Exception {
-    validateHiveConfig(hiveHook, hiveConf, authzConf);
-    this.hiveConf = hiveConf;
-    this.authzConf = authzConf;
-    this.authServer = new Server(authzConf.get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar()));
-    this.authProvider = getAuthProvider(hiveConf, authzConf, authServer.getName());
-    this.open = true;
-    this.activeRoleSet = parseActiveRoleSet(hiveConf.get(HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET,
-        authzConf.get(HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET, "")).trim());
-  }
-
-  public HiveAuthzBinding (HiveHook hiveHook, HiveConf hiveConf, HiveAuthzConf authzConf,
-      PrivilegeCache privilegeCache) throws Exception {
-    validateHiveConfig(hiveHook, hiveConf, authzConf);
-    this.hiveConf = hiveConf;
-    this.authzConf = authzConf;
-    this.authServer = new Server(authzConf.get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar()));
-    this.authProvider = getAuthProviderWithPrivilegeCache(authzConf, authServer.getName(), privilegeCache);
-    this.open = true;
-    this.activeRoleSet = parseActiveRoleSet(hiveConf.get(HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET,
-            authzConf.get(HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET, "")).trim());
-  }
-
-  private static ActiveRoleSet parseActiveRoleSet(String name)
-      throws SentryUserException {
-    return parseActiveRoleSet(name, null);
-  }
-
-  private static ActiveRoleSet parseActiveRoleSet(String name,
-      Set<TSentryRole> allowedRoles) throws SentryUserException {
-    // if unset, then we choose the default of ALL
-    if (name.isEmpty()) {
-      return ActiveRoleSet.ALL;
-    } else if (AccessConstants.NONE_ROLE.equalsIgnoreCase(name)) {
-      return new ActiveRoleSet(new HashSet<String>());
-    } else if (AccessConstants.ALL_ROLE.equalsIgnoreCase(name)) {
-      return ActiveRoleSet.ALL;
-    } else if (AccessConstants.RESERVED_ROLE_NAMES.contains(name.toUpperCase())) {
-      String msg = "Role " + name + " is reserved";
-      throw new IllegalArgumentException(msg);
-    } else {
-      if (allowedRoles != null) {
-        // check if the user has been granted the role
-        boolean foundRole = false;
-        for (TSentryRole role : allowedRoles) {
-          if (role.getRoleName().equalsIgnoreCase(name)) {
-            foundRole = true;
-            break;
-          }
-        }
-        if (!foundRole) {
-          //Set the reason for hive binding to pick up
-          throw new SentryUserException("Not authorized to set role " + name, "Not authorized to set role " + name);
-
-        }
-      }
-      return new ActiveRoleSet(Sets.newHashSet(ROLE_SET_SPLITTER.split(name)));
-    }
-  }
-
-  private void validateHiveConfig(HiveHook hiveHook, HiveConf hiveConf, HiveAuthzConf authzConf)
-      throws InvalidConfigurationException{
-    if(hiveHook.equals(HiveHook.HiveMetaStore)) {
-      validateHiveMetaStoreConfig(hiveConf, authzConf);
-    }else if(hiveHook.equals(HiveHook.HiveServer2)) {
-      validateHiveServer2Config(hiveConf, authzConf);
-    }
-  }
-
-  private void validateHiveMetaStoreConfig(HiveConf hiveConf, HiveAuthzConf authzConf)
-      throws InvalidConfigurationException{
-    boolean isTestingMode = Boolean.parseBoolean(Strings.nullToEmpty(
-        authzConf.get(AuthzConfVars.SENTRY_TESTING_MODE.getVar())).trim());
-    LOG.debug("Testing mode is " + isTestingMode);
-    if(!isTestingMode) {
-      boolean sasl = hiveConf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL);
-      if(!sasl) {
-        throw new InvalidConfigurationException(
-            ConfVars.METASTORE_USE_THRIFT_SASL + " can't be false in non-testing mode");
-      }
-    } else {
-      boolean setUgi = hiveConf.getBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI);
-      if(!setUgi) {
-        throw new InvalidConfigurationException(
-            ConfVars.METASTORE_EXECUTE_SET_UGI.toString() + " can't be false in non secure mode");
-      }
-    }
-  }
-
-  private void validateHiveServer2Config(HiveConf hiveConf, HiveAuthzConf authzConf)
-      throws InvalidConfigurationException{
-    boolean isTestingMode = Boolean.parseBoolean(Strings.nullToEmpty(
-        authzConf.get(AuthzConfVars.SENTRY_TESTING_MODE.getVar())).trim());
-    LOG.debug("Testing mode is " + isTestingMode);
-    if(!isTestingMode) {
-      String authMethod = Strings.nullToEmpty(hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION)).trim();
-      if("none".equalsIgnoreCase(authMethod)) {
-        throw new InvalidConfigurationException(ConfVars.HIVE_SERVER2_AUTHENTICATION +
-            " can't be none in non-testing mode");
-      }
-      boolean impersonation = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS);
-      boolean allowImpersonation = Boolean.parseBoolean(Strings.nullToEmpty(
-          authzConf.get(AuthzConfVars.AUTHZ_ALLOW_HIVE_IMPERSONATION.getVar())).trim());
-
-      if(impersonation && !allowImpersonation) {
-        LOG.error("Role based authorization does not work with HiveServer2 impersonation");
-        throw new InvalidConfigurationException(ConfVars.HIVE_SERVER2_ENABLE_DOAS +
-            " can't be set to true in non-testing mode");
-      }
-    }
-    String defaultUmask = hiveConf.get(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY);
-    if("077".equalsIgnoreCase(defaultUmask)) {
-      LOG.error("HiveServer2 required a default umask of 077");
-      throw new InvalidConfigurationException(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY +
-          " should be 077 in non-testing mode");
-    }
-  }
-
-  // Instantiate the configured authz provider
-  public static AuthorizationProvider getAuthProvider(HiveConf hiveConf, HiveAuthzConf authzConf,
-        String serverName) throws Exception {
-    // get the provider class and resources from the authz config
-    String authProviderName = authzConf.get(AuthzConfVars.AUTHZ_PROVIDER.getVar());
-    String resourceName =
-        authzConf.get(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar());
-    String providerBackendName = authzConf.get(AuthzConfVars.AUTHZ_PROVIDER_BACKEND.getVar());
-    String policyEngineName = authzConf.get(AuthzConfVars.AUTHZ_POLICY_ENGINE.getVar());
-
-    LOG.debug("Using authorization provider " + authProviderName +
-        " with resource " + resourceName + ", policy engine "
-        + policyEngineName + ", provider backend " + providerBackendName);
-    // load the provider backend class
-    Constructor<?> providerBackendConstructor =
-        Class.forName(providerBackendName).getDeclaredConstructor(Configuration.class, String.class);
-    providerBackendConstructor.setAccessible(true);
-    ProviderBackend providerBackend = (ProviderBackend) providerBackendConstructor.
-        newInstance(new Object[] {authzConf, resourceName});
-
-    // load the policy engine class
-    Constructor<?> policyConstructor =
-      Class.forName(policyEngineName).getDeclaredConstructor(String.class, ProviderBackend.class);
-    policyConstructor.setAccessible(true);
-    PolicyEngine policyEngine = (PolicyEngine) policyConstructor.
-        newInstance(new Object[] {serverName, providerBackend});
-
-
-    // load the authz provider class
-    Constructor<?> constructor =
-      Class.forName(authProviderName).getDeclaredConstructor(String.class, PolicyEngine.class);
-    constructor.setAccessible(true);
-    return (AuthorizationProvider) constructor.newInstance(new Object[] {resourceName, policyEngine});
-  }
-
-  // Instantiate the authz provider using PrivilegeCache, this method is used for metadata filter function.
-  public static AuthorizationProvider getAuthProviderWithPrivilegeCache(HiveAuthzConf authzConf,
-      String serverName, PrivilegeCache privilegeCache) throws Exception {
-    // get the provider class and resources from the authz config
-    String authProviderName = authzConf.get(AuthzConfVars.AUTHZ_PROVIDER.getVar());
-    String resourceName =
-            authzConf.get(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar());
-    String policyEngineName = authzConf.get(AuthzConfVars.AUTHZ_POLICY_ENGINE.getVar());
-
-    LOG.debug("Using authorization provider " + authProviderName +
-            " with resource " + resourceName + ", policy engine "
-            + policyEngineName + ", provider backend SimpleCacheProviderBackend");
-
-    ProviderBackend providerBackend = new SimpleCacheProviderBackend(authzConf, resourceName);
-    ProviderBackendContext context = new ProviderBackendContext();
-    context.setBindingHandle(privilegeCache);
-    providerBackend.initialize(context);
-
-    // load the policy engine class
-    Constructor<?> policyConstructor =
-            Class.forName(policyEngineName).getDeclaredConstructor(String.class, ProviderBackend.class);
-    policyConstructor.setAccessible(true);
-    PolicyEngine policyEngine = (PolicyEngine) policyConstructor.
-            newInstance(new Object[] {serverName, providerBackend});
-
-    // load the authz provider class
-    Constructor<?> constructor =
-            Class.forName(authProviderName).getDeclaredConstructor(String.class, PolicyEngine.class);
-    constructor.setAccessible(true);
-    return (AuthorizationProvider) constructor.newInstance(new Object[] {resourceName, policyEngine});
-  }
-
-
-  /**
-   * Validate the privileges for the given operation and subject
-   * @param hiveOp
-   * @param stmtAuthPrivileges
-   * @param subject
-   * @param inputHierarchyList
-   * @param outputHierarchyList
-   * @throws AuthorizationException
-   */
-  public void authorize(HiveOperation hiveOp, HiveAuthzPrivileges stmtAuthPrivileges,
-      Subject subject, List<List<DBModelAuthorizable>> inputHierarchyList,
-      List<List<DBModelAuthorizable>> outputHierarchyList)
-          throws AuthorizationException {
-    if (!open) {
-      throw new IllegalStateException("Binding has been closed");
-    }
-    boolean isDebug = LOG.isDebugEnabled();
-    if(isDebug) {
-      LOG.debug("Going to authorize statement " + hiveOp.name() +
-          " for subject " + subject.getName());
-    }
-
-    /* For each read and write entity captured by the compiler,
-     *    check if that object type is part of the input/output privilege list;
-     *    if it is, then validate the access.
-     * Note the hive compiler gathers information on additional entities like partitions,
-     * etc. which are not of interest at this point. Hence it is quite
-     * possible that we won't validate all the entities in the given list.
-     */
-
-    // Check read entities
-    Map<AuthorizableType, EnumSet<DBModelAction>> requiredInputPrivileges =
-        stmtAuthPrivileges.getInputPrivileges();
-    if(isDebug) {
-      LOG.debug("requiredInputPrivileges = " + requiredInputPrivileges);
-      LOG.debug("inputHierarchyList = " + inputHierarchyList);
-    }
-    Map<AuthorizableType, EnumSet<DBModelAction>> requiredOutputPrivileges =
-        stmtAuthPrivileges.getOutputPrivileges();
-    if(isDebug) {
-      LOG.debug("requiredOuputPrivileges = " + requiredOutputPrivileges);
-      LOG.debug("outputHierarchyList = " + outputHierarchyList);
-    }
-
-    boolean found = false;
-    for (Map.Entry<AuthorizableType, EnumSet<DBModelAction>> entry : requiredInputPrivileges.entrySet()) {
-      AuthorizableType key = entry.getKey();
-      for (List<DBModelAuthorizable> inputHierarchy : inputHierarchyList) {
-        if (getAuthzType(inputHierarchy).equals(key)) {
-          found = true;
-          if (!authProvider.hasAccess(subject, inputHierarchy, entry.getValue(), activeRoleSet)) {
-            throw new AuthorizationException("User " + subject.getName() +
-                " does not have privileges for " + hiveOp.name());
-          }
-        }
-      }
-      if (!found && !key.equals(AuthorizableType.URI) && !(hiveOp.equals(HiveOperation.QUERY))
-          && !(hiveOp.equals(HiveOperation.CREATETABLE_AS_SELECT))) {
-        //URI privileges are optional for some privileges: anyPrivilege, tableDDLAndOptionalUriPrivilege
-        //Query can mean select/insert/analyze where all of them have different required privileges.
-        //CreateAsSelect can have table/column privileges with select.
-        //For these alone we skip if there is no equivalent input privilege
-        //TODO: Even this case should be handled to make sure we do not skip the privilege check if we did not build
-        //the input privileges correctly
-        throw new AuthorizationException("Required privilege( " + key.name() + ") not available in input privileges");
-      }
-      found = false;
-    }
-
-    for(AuthorizableType key: requiredOutputPrivileges.keySet()) {
-      for (List<DBModelAuthorizable> outputHierarchy : outputHierarchyList) {
-        if (getAuthzType(outputHierarchy).equals(key)) {
-          found = true;
-          if (!authProvider.hasAccess(subject, outputHierarchy, requiredOutputPrivileges.get(key), activeRoleSet)) {
-            throw new AuthorizationException("User " + subject.getName() +
-                " does not have privileges for " + hiveOp.name());
-          }
-        }
-      }
-      if(!found && !(key.equals(AuthorizableType.URI)) &&  !(hiveOp.equals(HiveOperation.QUERY))) {
-        //URI privileges are optional for some privileges: tableInsertPrivilege
-        //Query can mean select/insert/analyze where all of them have different required privileges.
-        //For these alone we skip if there is no equivalent output privilege
-        //TODO: Even this case should be handled to make sure we do not skip the privilege check if we did not build
-        //the output privileges correctly
-        throw new AuthorizationException("Required privilege( " + key.name() + ") not available in output privileges");
-      }
-      found = false;
-    }
-
-  }
-
-  public void setActiveRoleSet(String activeRoleSet,
-      Set<TSentryRole> allowedRoles) throws SentryUserException {
-    this.activeRoleSet = parseActiveRoleSet(activeRoleSet, allowedRoles);
-    hiveConf.set(HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET, activeRoleSet);
-  }
-
-  public ActiveRoleSet getActiveRoleSet() {
-    return activeRoleSet;
-  }
-
-  public Set<String> getGroups(Subject subject) {
-    return authProvider.getGroupMapping().getGroups(subject.getName());
-  }
-
-  public Server getAuthServer() {
-    if (!open) {
-      throw new IllegalStateException("Binding has been closed");
-    }
-    return authServer;
-  }
-
-  public HiveAuthzConf getAuthzConf() {
-    return authzConf;
-  }
-
-  public HiveConf getHiveConf() {
-    return hiveConf;
-  }
-
-  private AuthorizableType getAuthzType (List<DBModelAuthorizable> hierarchy){
-    return hierarchy.get(hierarchy.size() -1).getAuthzType();
-  }
-
-  public List<String> getLastQueryPrivilegeErrors() {
-    if (!open) {
-      throw new IllegalStateException("Binding has been closed");
-    }
-    return authProvider.getLastFailedPrivileges();
-  }
-
-  public void close() {
-    authProvider.close();
-  }
-
-  public AuthorizationProvider getCurrentAuthProvider() {
-    return authProvider;
-  }
-}
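
To make the authorize contract concrete, a hedged sketch of a single table-level read check; the
subject, object names and queryPrivileges (a HiveAuthzPrivileges instance, see the builder in the
next file) are placeholders:

    // Authorizable hierarchy server -> db -> table for the object being read.
    List<DBModelAuthorizable> readHierarchy = Lists.newArrayList();
    readHierarchy.add(binding.getAuthServer());
    readHierarchy.add(new Database("db1"));
    readHierarchy.add(new Table("tbl1"));

    List<List<DBModelAuthorizable>> inputs = Lists.newArrayList();
    inputs.add(readHierarchy);
    List<List<DBModelAuthorizable>> outputs = Lists.newArrayList();

    // Throws AuthorizationException if "bob" lacks the required privilege.
    binding.authorize(HiveOperation.QUERY, queryPrivileges,
        new Subject("bob"), inputs, outputs);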

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java
deleted file mode 100644
index f164b30..0000000
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivileges.java
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.binding.hive.authz;
-
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.sentry.core.model.db.DBModelAction;
-import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType;
-
-/**
- * Hive objects with required access privileges mapped to auth provider privileges
- */
-public class HiveAuthzPrivileges {
-
-  /**
-   * Operation type used for privilege granting
-   */
-  public static enum HiveOperationType {
-    UNKNOWN,
-    DDL,
-    DML,
-    DATA_LOAD,
-    DATA_UNLOAD,
-    QUERY,
-    INFO
-  };
-
-  /**
-   * Scope of the operation. The auth provider interface has different methods
-   * for some of these. Hence we want to be able to identify the auth scope of
-   * a statement, eg. server level or DB level.
-   */
-  public static enum HiveOperationScope {
-    UNKNOWN,
-    SERVER,
-    DATABASE,
-    TABLE,
-    FUNCTION,
-    CONNECT,
-    COLUMN
-  }
-
-  public static enum HiveExtendedOperation {
-    TRANSFORM,
-    RESOURCE
-  }
-
-  public static class AuthzPrivilegeBuilder {
-    private final Map<AuthorizableType, EnumSet<DBModelAction>> inputPrivileges =
-        new HashMap<AuthorizableType ,EnumSet<DBModelAction>>();
-    private final Map<AuthorizableType,EnumSet<DBModelAction>> outputPrivileges =
-        new HashMap<AuthorizableType,EnumSet<DBModelAction>>();
-    private HiveOperationType operationType;
-    private HiveOperationScope operationScope;
-
-    public AuthzPrivilegeBuilder addInputObjectPriviledge(AuthorizableType inputObjectType, EnumSet<DBModelAction> inputPrivilege) {
-      inputPrivileges.put(inputObjectType, inputPrivilege);
-      return this;
-    }
-
-    public AuthzPrivilegeBuilder addOutputEntityPriviledge(AuthorizableType outputEntityType, EnumSet<DBModelAction> outputPrivilege) {
-      outputPrivileges.put(outputEntityType, outputPrivilege);
-      return this;
-    }
-
-    public AuthzPrivilegeBuilder addOutputObjectPriviledge(AuthorizableType outputObjectType, EnumSet<DBModelAction> outputPrivilege) {
-      outputPrivileges.put(outputObjectType, outputPrivilege);
-      return this;
-    }
-
-    public AuthzPrivilegeBuilder setOperationType(HiveOperationType operationType) {
-      this.operationType = operationType;
-      return this;
-    }
-
-    public AuthzPrivilegeBuilder setOperationScope(HiveOperationScope operationScope) {
-      this.operationScope = operationScope;
-      return this;
-    }
-
-    public HiveAuthzPrivileges build() {
-      if (operationScope.equals(HiveOperationScope.UNKNOWN)) {
-        throw new UnsupportedOperationException("Operation scope is not set");
-      }
-
-      if (operationType.equals(HiveOperationType.UNKNOWN)) {
-        throw new UnsupportedOperationException("Operation scope is not set");
-      }
-
-      return new HiveAuthzPrivileges(inputPrivileges, outputPrivileges, operationType, operationScope);
-    }
-  }
-
-  private final Map<AuthorizableType,EnumSet<DBModelAction>> inputPrivileges =
-      new HashMap<AuthorizableType,EnumSet<DBModelAction>>();
-  private final Map<AuthorizableType,EnumSet<DBModelAction>>  outputPrivileges =
-      new HashMap<AuthorizableType,EnumSet<DBModelAction>>();
-  private final HiveOperationType operationType;
-  private final HiveOperationScope operationScope;
-
-  protected HiveAuthzPrivileges(Map<AuthorizableType,EnumSet<DBModelAction>> inputPrivileges,
-      Map<AuthorizableType,EnumSet<DBModelAction>> outputPrivileges, HiveOperationType operationType,
-      HiveOperationScope operationScope) {
-    this.inputPrivileges.putAll(inputPrivileges);
-    this.outputPrivileges.putAll(outputPrivileges);
-    this.operationScope = operationScope;
-    this.operationType = operationType;
-  }
-
-  /**
-   * @return the inputPrivileges
-   */
-  public Map<AuthorizableType, EnumSet<DBModelAction>> getInputPrivileges() {
-    return inputPrivileges;
-  }
-
-  /**
-   * @return the outputPrivileges
-   */
-  public Map<AuthorizableType, EnumSet<DBModelAction>> getOutputPrivileges() {
-    return outputPrivileges;
-  }
-
-  /**
-   * @return the operationType
-   */
-  public HiveOperationType getOperationType() {
-    return operationType;
-  }
-
-  /**
-   * @return the operationScope
-   */
-  public HiveOperationScope getOperationScope() {
-    return operationScope;
-  }
-}
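
For example, a table-scoped query privilege, roughly in the shape the binding's
HiveAuthzPrivilegesMap defines for SELECT statements (the exact action sets there may differ),
can be assembled with the builder:

    HiveAuthzPrivileges tableQueryPrivilege =
        new HiveAuthzPrivileges.AuthzPrivilegeBuilder()
            // reader must hold SELECT on the table (or on a URI it resolves to)
            .addInputObjectPriviledge(AuthorizableType.Table,
                EnumSet.of(DBModelAction.SELECT))
            .addInputObjectPriviledge(AuthorizableType.URI,
                EnumSet.of(DBModelAction.SELECT))
            .setOperationScope(HiveAuthzPrivileges.HiveOperationScope.TABLE)
            .setOperationType(HiveAuthzPrivileges.HiveOperationType.QUERY)
            .build();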