Posted to commits@sentry.apache.org by ja...@apache.org on 2014/05/08 17:58:17 UTC

git commit: SENTRY-176: Not able to read policy files on HDFS (Regression) (Sravya Tirukkovalur via Jarek Jarcec Cecho)

Repository: incubator-sentry
Updated Branches:
  refs/heads/master 1a72f6197 -> 4adf59a69


SENTRY-176: Not able to read policy files on HDFS (Regression) (Sravya Tirukkovalur via Jarek Jarcec Cecho)


Project: http://git-wip-us.apache.org/repos/asf/incubator-sentry/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-sentry/commit/4adf59a6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-sentry/tree/4adf59a6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-sentry/diff/4adf59a6

Branch: refs/heads/master
Commit: 4adf59a691e83fa625fb7b1d24b7934c323a5a02
Parents: 1a72f61
Author: Jarek Jarcec Cecho <ja...@apache.org>
Authored: Thu May 8 08:57:05 2014 -0700
Committer: Jarek Jarcec Cecho <ja...@apache.org>
Committed: Thu May 8 08:57:05 2014 -0700

----------------------------------------------------------------------
 .../sentry/binding/hive/conf/HiveAuthzConf.java | 16 +++--
 .../apache/sentry/provider/file/PolicyFile.java | 70 ++++----------------
 .../sentry/provider/file/PolicyFiles.java       | 19 +++---
 .../file/SimpleFileProviderBackend.java         | 55 ++++++++-------
 .../e2e/hive/AbstractTestWithHiveServer.java    | 10 ++-
 .../AbstractTestWithStaticConfiguration.java    | 32 +++++----
 .../sentry/tests/e2e/hive/TestCrossDbOps.java   | 35 +++++++---
 .../e2e/hive/TestPerDatabasePolicyFile.java     | 14 ++--
 .../tests/e2e/hive/TestServerConfiguration.java | 32 ++++-----
 .../sentry/tests/e2e/hive/fs/AbstractDFS.java   | 20 +++++-
 .../sentry/tests/e2e/hive/fs/ClusterDFS.java    |  7 +-
 .../apache/sentry/tests/e2e/hive/fs/DFS.java    |  4 ++
 .../sentry/tests/e2e/hive/fs/MiniDFS.java       | 10 +--
 .../e2e/hive/hiveserver/HiveServerFactory.java  | 34 ++++------
 14 files changed, 169 insertions(+), 189 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/4adf59a6/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
index e162bbd..7b7bf8e 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
@@ -16,18 +16,18 @@
  */
 package org.apache.sentry.binding.hive.conf;
 
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.mortbay.log.Log;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+
 
 public class HiveAuthzConf extends Configuration {
 
@@ -146,9 +146,11 @@ public class HiveAuthzConf extends Configuration {
   private final String hiveAuthzSiteFile;
 
   public HiveAuthzConf(URL hiveAuthzSiteURL) {
-    super(false);
+    super();
+    LOG.info("DefaultFS: " + super.get("fs.defaultFS"));
     addResource(hiveAuthzSiteURL);
     applySystemProperties();
+    LOG.info("DefaultFS: " + super.get("fs.defaultFS"));
     this.hiveAuthzSiteFile = hiveAuthzSiteURL.toString();
   }
   /**

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/4adf59a6/sentry-provider/sentry-provider-file/src/main/java/org/apache/sentry/provider/file/PolicyFile.java
----------------------------------------------------------------------
diff --git a/sentry-provider/sentry-provider-file/src/main/java/org/apache/sentry/provider/file/PolicyFile.java b/sentry-provider/sentry-provider-file/src/main/java/org/apache/sentry/provider/file/PolicyFile.java
index 0189f85..19bf8cc 100644
--- a/sentry-provider/sentry-provider-file/src/main/java/org/apache/sentry/provider/file/PolicyFile.java
+++ b/sentry-provider/sentry-provider-file/src/main/java/org/apache/sentry/provider/file/PolicyFile.java
@@ -17,28 +17,26 @@
 
 package org.apache.sentry.provider.file;
 
-import static org.apache.sentry.provider.file.PolicyFileConstants.DATABASES;
-import static org.apache.sentry.provider.file.PolicyFileConstants.GROUPS;
-import static org.apache.sentry.provider.file.PolicyFileConstants.ROLES;
-import static org.apache.sentry.provider.file.PolicyFileConstants.USERS;
-
-import java.io.File;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Charsets;
 import com.google.common.base.Joiner;
-import com.google.common.base.Preconditions;
 import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Multimap;
 import com.google.common.io.Files;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+import static org.apache.sentry.provider.file.PolicyFileConstants.DATABASES;
+import static org.apache.sentry.provider.file.PolicyFileConstants.GROUPS;
+import static org.apache.sentry.provider.file.PolicyFileConstants.ROLES;
+import static org.apache.sentry.provider.file.PolicyFileConstants.USERS;
 
 /**
  * PolicyFile creator. Written specifically to be used with tests. Specifically
@@ -134,50 +132,6 @@ public class PolicyFile {
     LOGGER.info("Writing policy file to " + file + ":\n" + contents);
     Files.write(contents, file, Charsets.UTF_8);
 
-    String hiveServer2 = System.getProperty("sentry.e2etest.hiveServer2Type", "InternalHiveServer2");
-
-    //Currently policyOnHDFS is only supported for UnmanagedHiveServer, and global policy file is required to be on hdfs
-    if(hiveServer2.equals("UnmanagedHiveServer2")) {
-      String policyOnHDFS = System.getProperty("sentry.e2etest.hive.policyOnHDFS", "true");
-      if(policyOnHDFS.trim().equalsIgnoreCase("true") || file.getName().equalsIgnoreCase("sentry-provider.ini")){
-        String policyLocation = System.getProperty("sentry.e2etest.hive.policy.location", "/user/hive/sentry");
-        String policyFileLocation = policyLocation + "/" + file.getName();
-        LOGGER.info("Moving policy file to " + policyFileLocation);
-        String userKeytab = System.getProperty("sentry.e2etest.hive.policyOwnerKeytab");
-        String userPrincipal = System.getProperty("sentry.e2etest.hive.policyOwnerPrincipal");
-        Preconditions.checkNotNull(userKeytab);
-        Preconditions.checkNotNull(userPrincipal);
-        hdfsPut(file, policyFileLocation, userKeytab, userPrincipal);
-      }
-    }
-  }
-
-  private void hdfsPut(File file, String hdfsPath, String userKeytab, String userPrincipal) throws Exception {
-    String command, status;
-    Process p;
-
-    command = "kinit -kt " + userKeytab +  " " + userPrincipal;
-    p = Runtime.getRuntime().exec(command);
-    if(p.waitFor()!=0) {
-      throw new Exception("Setup incomplete. " + command + " FAILED");
-    }
-    else {
-      LOGGER.info("Command:" + command + " PASSED");
-    }
-
-    command = "hdfs dfs -rm " + hdfsPath;
-    p = Runtime.getRuntime().exec(command);
-    status = (p.waitFor()==0)?"PASSED":"FAILED";
-    LOGGER.warn("Command:" + command + " " + status);
-
-    command = "hdfs dfs -put " + file.getAbsolutePath() + " " + hdfsPath;
-    p = Runtime.getRuntime().exec(command);
-    if(p.waitFor()!=0) {
-      throw new Exception("Setup incomplete. " + command + " FAILED");
-    }
-    else {
-      LOGGER.info("Command:" + command + " PASSED");
-    }
   }
 
   private String getSection(String name, Map<String, String> mapping) {

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/4adf59a6/sentry-provider/sentry-provider-file/src/main/java/org/apache/sentry/provider/file/PolicyFiles.java
----------------------------------------------------------------------
diff --git a/sentry-provider/sentry-provider-file/src/main/java/org/apache/sentry/provider/file/PolicyFiles.java b/sentry-provider/sentry-provider-file/src/main/java/org/apache/sentry/provider/file/PolicyFiles.java
index a908ec3..4f05be6 100644
--- a/sentry-provider/sentry-provider-file/src/main/java/org/apache/sentry/provider/file/PolicyFiles.java
+++ b/sentry-provider/sentry-provider-file/src/main/java/org/apache/sentry/provider/file/PolicyFiles.java
@@ -16,13 +16,8 @@
  */
 package org.apache.sentry.provider.file;
 
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-
+import com.google.common.io.ByteStreams;
+import com.google.common.io.Resources;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -30,8 +25,12 @@ import org.apache.shiro.config.Ini;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.io.ByteStreams;
-import com.google.common.io.Resources;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
 
 public class PolicyFiles {
 
@@ -74,6 +73,8 @@ public class PolicyFiles {
     InputStream inputStream = null;
     try {
       LOGGER.info("Opening " + path);
+      String dfsUri = fileSystem.getDefaultUri(fileSystem.getConf()).toString();
+      LOGGER.error("dfsUri " + dfsUri);
       inputStream = fileSystem.open(path);
       Ini ini = new Ini();
       ini.load(inputStream);

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/4adf59a6/sentry-provider/sentry-provider-file/src/main/java/org/apache/sentry/provider/file/SimpleFileProviderBackend.java
----------------------------------------------------------------------
diff --git a/sentry-provider/sentry-provider-file/src/main/java/org/apache/sentry/provider/file/SimpleFileProviderBackend.java b/sentry-provider/sentry-provider-file/src/main/java/org/apache/sentry/provider/file/SimpleFileProviderBackend.java
index 2dadc47..6e8f02f 100644
--- a/sentry-provider/sentry-provider-file/src/main/java/org/apache/sentry/provider/file/SimpleFileProviderBackend.java
+++ b/sentry-provider/sentry-provider-file/src/main/java/org/apache/sentry/provider/file/SimpleFileProviderBackend.java
@@ -16,21 +16,19 @@
  */
 package org.apache.sentry.provider.file;
 
-import static org.apache.sentry.provider.file.PolicyFileConstants.DATABASES;
-import static org.apache.sentry.provider.file.PolicyFileConstants.GROUPS;
-import static org.apache.sentry.provider.file.PolicyFileConstants.ROLES;
-import static org.apache.sentry.provider.file.PolicyFileConstants.ROLE_SPLITTER;
-import static org.apache.sentry.provider.file.PolicyFileConstants.USERS;
-
-import java.io.IOException;
-import java.net.URI;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import javax.annotation.Nullable;
-
+import com.google.common.base.Splitter;
+import com.google.common.base.Strings;
+import com.google.common.collect.HashBasedTable;
+import com.google.common.collect.HashMultimap;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Interner;
+import com.google.common.collect.Interners;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Multimap;
+import com.google.common.collect.Sets;
+import com.google.common.collect.Table;
+import com.google.common.collect.Table.Cell;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -45,19 +43,19 @@ import org.apache.shiro.config.Ini;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Splitter;
-import com.google.common.base.Strings;
-import com.google.common.collect.HashBasedTable;
-import com.google.common.collect.HashMultimap;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Interner;
-import com.google.common.collect.Interners;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Multimap;
-import com.google.common.collect.Sets;
-import com.google.common.collect.Table;
-import com.google.common.collect.Table.Cell;
+import javax.annotation.Nullable;
+import java.io.IOException;
+import java.net.URI;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static org.apache.sentry.provider.file.PolicyFileConstants.DATABASES;
+import static org.apache.sentry.provider.file.PolicyFileConstants.GROUPS;
+import static org.apache.sentry.provider.file.PolicyFileConstants.ROLES;
+import static org.apache.sentry.provider.file.PolicyFileConstants.ROLE_SPLITTER;
+import static org.apache.sentry.provider.file.PolicyFileConstants.USERS;
 
 public class SimpleFileProviderBackend implements ProviderBackend {
 
@@ -207,6 +205,7 @@ public class SimpleFileProviderBackend implements ProviderBackend {
     Table<String, String, Set<String>> groupRolePrivilegeTableTemp = HashBasedTable.create();
     Ini ini;
     LOGGER.info("Parsing " + resourcePath);
+    LOGGER.info("Filesystem: " + fileSystem.getUri());
     try {
       try {
         ini = PolicyFiles.loadFromPath(fileSystem, resourcePath);

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/4adf59a6/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithHiveServer.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithHiveServer.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithHiveServer.java
index 0258c2d..9b3c04a 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithHiveServer.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithHiveServer.java
@@ -16,11 +16,8 @@
  */
 package org.apache.sentry.tests.e2e.hive;
 
-import java.io.File;
-import java.util.Map;
-
+import com.google.common.io.Files;
 import junit.framework.Assert;
-
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -30,7 +27,8 @@ import org.junit.After;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.io.Files;
+import java.io.File;
+import java.util.Map;
 
 public abstract class AbstractTestWithHiveServer {
   private static final Logger LOGGER = LoggerFactory
@@ -61,7 +59,7 @@ public abstract class AbstractTestWithHiveServer {
     confDir = assertCreateDir(new File(baseDir, "etc"));
     dataDir = assertCreateDir(new File(baseDir, "data"));
     policyFile = new File(confDir, HiveServerFactory.AUTHZ_PROVIDER_FILENAME);
-    hiveServer = HiveServerFactory.create(properties, baseDir, confDir, logDir, policyFile, fileSystem);
+    hiveServer = HiveServerFactory.create(properties, baseDir, confDir, logDir, policyFile.getPath(), fileSystem);
     hiveServer.start();
     return new Context(hiveServer, getFileSystem(),
         baseDir, confDir, dataDir, policyFile);

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/4adf59a6/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java
index 6444407..2b36d80 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java
@@ -16,14 +16,9 @@
  */
 package org.apache.sentry.tests.e2e.hive;
 
-import java.io.File;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.Statement;
-import java.util.Map;
-
+import com.google.common.collect.Maps;
+import com.google.common.io.Files;
 import junit.framework.Assert;
-
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.sentry.tests.e2e.hive.fs.DFS;
@@ -36,8 +31,11 @@ import org.junit.BeforeClass;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Maps;
-import com.google.common.io.Files;
+import java.io.File;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.Statement;
+import java.util.Map;
 
 public abstract class AbstractTestWithStaticConfiguration {
   private static final Logger LOGGER = LoggerFactory
@@ -76,7 +74,7 @@ public abstract class AbstractTestWithStaticConfiguration {
       VIEW3 = "view_3",
       INDEX1 = "index_1",
       INDEX2 = "index_2";
-
+  protected static boolean policy_on_hdfs = false;
 
   protected static File baseDir;
   protected static File logDir;
@@ -150,9 +148,19 @@ public abstract class AbstractTestWithStaticConfiguration {
 
     String dfsType = System.getProperty(DFSFactory.FS_TYPE);
     dfs = DFSFactory.create(dfsType, baseDir);
-
     fileSystem = dfs.getFileSystem();
-    hiveServer = HiveServerFactory.create(properties, baseDir, confDir, logDir, policyFileLocation, fileSystem);
+
+    String policyURI;
+    if (policy_on_hdfs) {
+      String dfsUri = fileSystem.getDefaultUri(fileSystem.getConf()).toString();
+      LOGGER.error("dfsUri " + dfsUri);
+      policyURI = dfsUri + System.getProperty("sentry.e2etest.hive.policy.location", "/user/hive/sentry");
+      policyURI += "/" + HiveServerFactory.AUTHZ_PROVIDER_FILENAME;
+    } else {
+      policyURI = policyFileLocation.getPath();
+    }
+
+    hiveServer = HiveServerFactory.create(properties, baseDir, confDir, logDir, policyURI, fileSystem);
     hiveServer.start();
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/4adf59a6/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestCrossDbOps.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestCrossDbOps.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestCrossDbOps.java
index 836cb27..7ef3975 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestCrossDbOps.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestCrossDbOps.java
@@ -17,8 +17,13 @@
 
 package org.apache.sentry.tests.e2e.hive;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import com.google.common.io.Resources;
+import junit.framework.Assert;
+import org.apache.sentry.provider.file.PolicyFile;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
 
 import java.io.File;
 import java.io.FileOutputStream;
@@ -30,14 +35,8 @@ import java.sql.Statement;
 import java.util.ArrayList;
 import java.util.List;
 
-import junit.framework.Assert;
-
-import org.apache.sentry.provider.file.PolicyFile;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.google.common.io.Resources;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 /* Tests privileges at table scope with cross database access */
 
@@ -46,6 +45,11 @@ public class TestCrossDbOps extends AbstractTestWithStaticConfiguration {
   private PolicyFile policyFile;
   private String loadData;
 
+  @BeforeClass
+  public static void setupTestStaticConfiguration() throws Exception{
+    policy_on_hdfs = true;
+    AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
+  }
   @Before
   public void setup() throws Exception {
     context = createContext();
@@ -84,6 +88,7 @@ public class TestCrossDbOps extends AbstractTestWithStaticConfiguration {
         .addPermissionsToRole("insert_tab2", "server=server1->db=db2->table=tab2->action=insert")
         .setUserGroupMapping(StaticUserGroup.getStaticMapping());
     policyFile.write(context.getPolicyFile());
+    dfs.writePolicyFile(context.getPolicyFile());
 
     // admin create two databases
     Connection connection = context.createConnection(ADMIN1);
@@ -200,6 +205,7 @@ public class TestCrossDbOps extends AbstractTestWithStaticConfiguration {
         .addPermissionsToRole("insert_tab2", "server=server1->db=db2->table=tab2->action=insert")
         .setUserGroupMapping(StaticUserGroup.getStaticMapping());
     policyFile.write(context.getPolicyFile());
+    dfs.writePolicyFile(context.getPolicyFile());
 
     // admin create two databases
     Connection connection = context.createConnection(ADMIN1);
@@ -358,6 +364,7 @@ public class TestCrossDbOps extends AbstractTestWithStaticConfiguration {
         .addPermissionsToRole("load_data", "server=server1->URI=file://" + dataFile.getPath())
         .setUserGroupMapping(StaticUserGroup.getStaticMapping());
     policyFile.write(context.getPolicyFile());
+    dfs.writePolicyFile(context.getPolicyFile());
 
     dropDb(ADMIN1, DB1, DB2);
     createDb(ADMIN1, DB1, DB2);
@@ -388,6 +395,8 @@ public class TestCrossDbOps extends AbstractTestWithStaticConfiguration {
     policyFile
         .setUserGroupMapping(StaticUserGroup.getStaticMapping())
         .write(context.getPolicyFile());
+    dfs.writePolicyFile(context.getPolicyFile());
+
     dropDb(ADMIN1, DB1);
     createDb(ADMIN1, DB1);
     Connection adminCon = context.createConnection(ADMIN1);
@@ -417,6 +426,7 @@ public class TestCrossDbOps extends AbstractTestWithStaticConfiguration {
         .addPermissionsToRole("db1_tab1_insert", "server=server1->db=db1->table=table_1->action=insert")
         .setUserGroupMapping(StaticUserGroup.getStaticMapping());
     policyFile.write(context.getPolicyFile());
+    dfs.writePolicyFile(context.getPolicyFile());
 
     Connection adminCon = context.createConnection(ADMIN1);
     Statement adminStmt = context.createStatement(adminCon);
@@ -447,6 +457,7 @@ public class TestCrossDbOps extends AbstractTestWithStaticConfiguration {
         .addRolesToGroup(USERGROUP1, "db1_tab2_all")
         .setUserGroupMapping(StaticUserGroup.getStaticMapping());
     policyFile.write(context.getPolicyFile());
+    dfs.writePolicyFile(context.getPolicyFile());
 
     dropDb(ADMIN1, DB1);
     createDb(ADMIN1, DB1);
@@ -493,6 +504,7 @@ public class TestCrossDbOps extends AbstractTestWithStaticConfiguration {
         .addPermissionsToRole("db1_tab1_select", "server=server1->db=db1->table=table_1->action=select")
         .setUserGroupMapping(StaticUserGroup.getStaticMapping())
         .write(context.getPolicyFile());
+    dfs.writePolicyFile(context.getPolicyFile());
 
     // create dbs
     Connection adminCon = context.createConnection(ADMIN1);
@@ -570,6 +582,7 @@ public class TestCrossDbOps extends AbstractTestWithStaticConfiguration {
         .addRolesToGroup(USERGROUP1, GROUP1_ROLE)
         .setUserGroupMapping(StaticUserGroup.getStaticMapping())
         .write(context.getPolicyFile());
+    dfs.writePolicyFile(context.getPolicyFile());
 
     dropDb(ADMIN1, DB1, DB2);
     createDb(ADMIN1, DB1, DB2);
@@ -610,6 +623,7 @@ public class TestCrossDbOps extends AbstractTestWithStaticConfiguration {
 
     policyFile.removePermissionsFromRole(GROUP1_ROLE, ALL_DB2);
     policyFile.write(context.getPolicyFile());
+    dfs.writePolicyFile(context.getPolicyFile());
 
     // create db1.view1 as select from db2.tbl2
     statement.execute("DROP VIEW IF EXISTS " + DB1 + "." + VIEW2);
@@ -648,6 +662,7 @@ public class TestCrossDbOps extends AbstractTestWithStaticConfiguration {
         .addPermissionsToRole("load_data", "server=server1->URI=file://" + dataFile.getPath())
         .setUserGroupMapping(StaticUserGroup.getStaticMapping())
         .write(context.getPolicyFile());
+    dfs.writePolicyFile(context.getPolicyFile());
 
     // admin create two databases
     dropDb(ADMIN1, DB1, DB2);

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/4adf59a6/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPerDatabasePolicyFile.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPerDatabasePolicyFile.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPerDatabasePolicyFile.java
index 805d60f..5c7c7d1 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPerDatabasePolicyFile.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPerDatabasePolicyFile.java
@@ -17,19 +17,16 @@
 
 package org.apache.sentry.tests.e2e.hive;
 
-import static org.junit.Assert.assertTrue;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.sql.Connection;
-import java.sql.Statement;
-
+import com.google.common.io.Resources;
 import org.apache.sentry.provider.file.PolicyFile;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.google.common.io.Resources;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.sql.Connection;
+import java.sql.Statement;
 
 public class TestPerDatabasePolicyFile extends AbstractTestWithStaticConfiguration {
   private static final String SINGLE_TYPE_DATA_FILE_NAME = "kv1.dat";
@@ -45,7 +42,6 @@ public class TestPerDatabasePolicyFile extends AbstractTestWithStaticConfigurati
     context = createContext();
     globalPolicyFile = context.getPolicyFile();
     dataDir = context.getDataDir();
-    assertTrue("Could not delete " + globalPolicyFile, context.deletePolicyFile());
     dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME);
     FileOutputStream to = new FileOutputStream(dataFile);
     Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to);

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/4adf59a6/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestServerConfiguration.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestServerConfiguration.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestServerConfiguration.java
index 6bb39bc..c05bb4f 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestServerConfiguration.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestServerConfiguration.java
@@ -17,18 +17,9 @@
 
 package org.apache.sentry.tests.e2e.hive;
 
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.Map;
-
+import com.google.common.base.Charsets;
+import com.google.common.collect.Maps;
+import com.google.common.io.Files;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.sentry.binding.hive.HiveAuthzBindingSessionHook;
@@ -40,9 +31,17 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.google.common.base.Charsets;
-import com.google.common.collect.Maps;
-import com.google.common.io.Files;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Map;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 
 public class TestServerConfiguration extends AbstractTestWithHiveServer {
 
@@ -113,8 +112,6 @@ public class TestServerConfiguration extends AbstractTestWithHiveServer {
   @Test
   public void testRemovalOfPolicyFile() throws Exception {
     context = createContext(properties);
-    File policyFile = context.getPolicyFile();
-    assertTrue("Could not delete " + policyFile, policyFile.delete());
     Connection connection = context.createConnection(ADMIN1);
     Statement statement = context.createStatement(connection);
     try {
@@ -132,7 +129,6 @@ public class TestServerConfiguration extends AbstractTestWithHiveServer {
   public void testCorruptionOfPolicyFile() throws Exception {
     context = createContext(properties);
     File policyFile = context.getPolicyFile();
-    assertTrue("Could not delete " + policyFile, policyFile.delete());
     FileOutputStream out = new FileOutputStream(policyFile);
     out.write("this is not valid".getBytes(Charsets.UTF_8));
     out.close();

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/4adf59a6/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/AbstractDFS.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/AbstractDFS.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/AbstractDFS.java
index 145584d..8b1345d 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/AbstractDFS.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/AbstractDFS.java
@@ -16,16 +16,22 @@
  */
 package org.apache.sentry.tests.e2e.hive.fs;
 
-import java.io.IOException;
-
 import junit.framework.Assert;
-
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
 
 public abstract class AbstractDFS implements DFS{
+  private static final Logger LOGGER = LoggerFactory
+      .getLogger(AbstractDFS.class);
   protected static FileSystem fileSystem;
   protected static Path dfsBaseDir;
+  public Path sentryDir;
+
 
   @Override
   public FileSystem getFileSystem(){
@@ -53,6 +59,14 @@ public abstract class AbstractDFS implements DFS{
     return dfsBaseDir;
   }
 
+  @Override
+  public void writePolicyFile(File srcFile) throws IOException {
+    String policyFileName = srcFile.getName();
+    Path destPath = new Path(sentryDir, policyFileName);
+    fileSystem.copyFromLocalFile(true, true, new Path(srcFile.getAbsolutePath()), destPath);
+    LOGGER.info("Copied file to HDFS: " + destPath.toString());
+  }
+
   protected void cleanBaseDir() throws Exception {
    cleanDir(dfsBaseDir);
   }

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/4adf59a6/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/ClusterDFS.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/ClusterDFS.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/ClusterDFS.java
index d5db811..a16bb38 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/ClusterDFS.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/ClusterDFS.java
@@ -16,9 +16,6 @@
  */
 package org.apache.sentry.tests.e2e.hive.fs;
 
-import java.security.PrivilegedExceptionAction;
-import java.util.Random;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -26,13 +23,15 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.security.PrivilegedExceptionAction;
+import java.util.Random;
+
 public class ClusterDFS extends AbstractDFS{
   private static final Logger LOGGER = LoggerFactory
       .getLogger(ClusterDFS.class);
   public static final String TEST_USER = "sentry.e2etest.hive.test.user";
   private static final String testUser = System.getProperty(TEST_USER, "hive");
   private static final String KEYTAB_LOCATION = System.getProperty("sentry.e2e.hive.keytabs.location");
-  private Path sentryDir;
   private UserGroupInformation ugi;
 
   ClusterDFS() throws Exception{

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/4adf59a6/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/DFS.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/DFS.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/DFS.java
index 9e9bb27..872a084 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/DFS.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/DFS.java
@@ -19,10 +19,14 @@ package org.apache.sentry.tests.e2e.hive.fs;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
+import java.io.File;
+import java.io.IOException;
+
 public interface DFS {
   public FileSystem getFileSystem();
   public void tearDown() throws Exception;
   public Path assertCreateDir(String dir) throws Exception;
   public Path getBaseDir();
   public void createBaseDir() throws Exception;
+  public void writePolicyFile(File srcFile) throws IOException;
 }

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/4adf59a6/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/MiniDFS.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/MiniDFS.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/MiniDFS.java
index de684a9..184c066 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/MiniDFS.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/fs/MiniDFS.java
@@ -16,14 +16,14 @@
  */
 package org.apache.sentry.tests.e2e.hive.fs;
 
-import java.io.File;
-
 import junit.framework.Assert;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 
+import java.io.File;
+import java.util.concurrent.TimeoutException;
+
 public class MiniDFS extends AbstractDFS {
   private static MiniDFSCluster dfsCluster;
 
@@ -33,6 +33,8 @@ public class MiniDFS extends AbstractDFS {
     conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, dfsDir.getPath());
     dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
     fileSystem = dfsCluster.getFileSystem();
+    String policyDir = System.getProperty("sentry.e2etest.hive.policy.location", "/user/hive/sentry");
+    sentryDir = super.assertCreateDfsDir(new Path(fileSystem.getUri() + policyDir));
     dfsBaseDir = assertCreateDfsDir(new Path(new Path(fileSystem.getUri()), "/base"));
   }
 
@@ -51,4 +53,4 @@ public class MiniDFS extends AbstractDFS {
     }
     return dir;
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/4adf59a6/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/HiveServerFactory.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/HiveServerFactory.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/HiveServerFactory.java
index f00efdb..8437845 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/HiveServerFactory.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/HiveServerFactory.java
@@ -16,14 +16,8 @@
  */
 package org.apache.sentry.tests.e2e.hive.hiveserver;
 
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.net.ServerSocket;
-import java.net.URL;
-import java.util.Map;
-
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.io.Resources;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -35,8 +29,13 @@ import org.junit.Assert;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.io.Resources;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.net.ServerSocket;
+import java.net.URL;
+import java.util.Map;
 
 public class HiveServerFactory {
   private static final Logger LOGGER = LoggerFactory
@@ -70,7 +69,7 @@ public class HiveServerFactory {
   }
 
   public static HiveServer create(Map<String, String> properties,
-      File baseDir, File confDir, File logDir, File policyFile,
+      File baseDir, File confDir, File logDir, String policyFile,
       FileSystem fileSystem)
           throws Exception {
     String type = properties.get(HIVESERVER2_TYPE);
@@ -85,16 +84,8 @@ public class HiveServerFactory {
   }
   private static HiveServer create(HiveServer2Type type,
       Map<String, String> properties, File baseDir, File confDir,
-      File logDir, File policyFile, FileSystem fileSystem) throws Exception {
+      File logDir, String policyFile, FileSystem fileSystem) throws Exception {
 
-    if(policyFile.exists()) {
-      LOGGER.info("Policy file " + policyFile + " exists");
-    } else {
-      LOGGER.info("Creating policy file " + policyFile);
-      FileOutputStream to = new FileOutputStream(policyFile);
-      Resources.copy(Resources.getResource(AUTHZ_PROVIDER_FILENAME), to);
-      to.close();
-    }
     if(type.equals(HiveServer2Type.UnmanagedHiveServer2)){
       LOGGER.info("Creating UnmanagedHiveServer");
       return new UnmanagedHiveServer();
@@ -119,7 +110,8 @@ public class HiveServerFactory {
       properties.put(ACCESS_TESTING_MODE, "true");
     }
     if(!properties.containsKey(AUTHZ_PROVIDER_RESOURCE)) {
-      properties.put(AUTHZ_PROVIDER_RESOURCE, policyFile.getPath());
+      LOGGER.info("Policy File location: " + policyFile);
+      properties.put(AUTHZ_PROVIDER_RESOURCE, policyFile);
     }
     if(!properties.containsKey(AUTHZ_PROVIDER)) {
       properties.put(AUTHZ_PROVIDER, LocalGroupResourceAuthorizationProvider.class.getName());