Posted to commits@hive.apache.org by ha...@apache.org on 2016/05/28 00:50:16 UTC

hive git commit: HIVE-13857: insert overwrite select from some table fails throwing org.apache.hadoop.security.AccessControlException - II (Hari Subramaniyan, reviewed by Ashutosh Chauhan)

Repository: hive
Updated Branches:
  refs/heads/master 4e3da98d7 -> 0d67cb0b7


HIVE-13857: insert overwrite select from some table fails throwing org.apache.hadoop.security.AccessControlException - II (Hari Subramaniyan, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0d67cb0b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0d67cb0b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0d67cb0b

Branch: refs/heads/master
Commit: 0d67cb0b7d8a08ea7ce7bbe43f0ba37d258cd5be
Parents: 4e3da98
Author: Hari Subramaniyan <ha...@apache.org>
Authored: Fri May 27 17:50:07 2016 -0700
Committer: Hari Subramaniyan <ha...@apache.org>
Committed: Fri May 27 17:50:07 2016 -0700

----------------------------------------------------------------------
 ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java   |  4 +++-
 ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java  |  6 ++++--
 .../main/java/org/apache/hadoop/hive/io/HdfsUtils.java    | 10 +++++++++-
 3 files changed, 16 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/0d67cb0b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index bc39994..00bff6b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -4232,10 +4232,12 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
       for (Path location : getLocations(db, table, partSpec)) {
         FileSystem fs = location.getFileSystem(conf);
         HdfsUtils.HadoopFileStatus status = new HdfsUtils.HadoopFileStatus(conf, fs, location);
+        FileStatus targetStatus = fs.getFileStatus(location);
+        String targetGroup = targetStatus == null ? null : targetStatus.getGroup();
         fs.delete(location, true);
         fs.mkdirs(location);
         try {
-          HdfsUtils.setFullFileStatus(conf, status, fs, location, false);
+          HdfsUtils.setFullFileStatus(conf, status, targetGroup, fs, location, false);
         } catch (Exception e) {
           LOG.warn("Error setting permissions of " + location, e);
         }

http://git-wip-us.apache.org/repos/asf/hive/blob/0d67cb0b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 379eddc..ee6c564 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -2688,6 +2688,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
             }
 
             Path destPath = new Path(destf, srcP.getName());
+            String srcGroup = srcFile.getGroup();
             if (!needToCopy && !isSrcLocal) {
               for (int counter = 1; !destFs.rename(srcP,destPath); counter++) {
                 destPath = new Path(destf, name + ("_copy_" + counter) + filetype);
@@ -2697,7 +2698,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
             }
 
             if (inheritPerms) {
-              HdfsUtils.setFullFileStatus(conf, fullDestStatus, destFs, destPath, false);
+              HdfsUtils.setFullFileStatus(conf, fullDestStatus, srcGroup, destFs, destPath, false);
             }
             if (null != newFiles) {
               newFiles.add(destPath);
@@ -2871,10 +2872,11 @@ private void constructOneLBLocationMap(FileStatus fSta,
                 public Void call() throws Exception {
                   SessionState.setCurrentSessionState(parentSession);
                   Path destPath = new Path(destf, status.getPath().getName());
+                  String group = status.getGroup();
                   try {
                     if(destFs.rename(status.getPath(), destf)) {
                       if (inheritPerms) {
-                        HdfsUtils.setFullFileStatus(conf, desiredStatus, destFs, destPath, false);
+                        HdfsUtils.setFullFileStatus(conf, desiredStatus, group, destFs, destPath, false);
                       }
                     } else {
                       throw new IOException("rename for src path: " + status.getPath() + " to dest path:"
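
In both Hive.java hunks the file's current group is read before the rename and handed to the new setFullFileStatus overload as a hint: a rename keeps the file's group, so if that group already matches the one being inherited from the destination directory there is nothing to change. A hedged caller sketch of that flow (the class and method here are made up; only HdfsUtils.HadoopFileStatus and the new setFullFileStatus overload come from the patch):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.io.HdfsUtils;

// Illustrative class name; not part of the patch.
public class MoveAndInheritSketch {
  static void moveAndInherit(Configuration conf, FileSystem destFs,
      Path destDir, Path srcFile) throws Exception {
    // The permissions and group to inherit come from the destination directory.
    HdfsUtils.HadoopFileStatus destDirStatus =
        new HdfsUtils.HadoopFileStatus(conf, destFs, destDir);

    // Remember the source file's group before the rename; this is the group
    // the file will still have once it sits under the destination directory.
    FileStatus srcStatus = destFs.getFileStatus(srcFile);
    String srcGroup = srcStatus.getGroup();

    Path destPath = new Path(destDir, srcFile.getName());
    if (!destFs.rename(srcFile, destPath)) {
      throw new IOException("rename failed: " + srcFile + " -> " + destPath);
    }

    // New overload: the extra group argument lets HdfsUtils skip setOwner
    // when the file already carries the directory's group.
    HdfsUtils.setFullFileStatus(conf, destDirStatus, srcGroup, destFs, destPath, false);
  }
}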

http://git-wip-us.apache.org/repos/asf/hive/blob/0d67cb0b/shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java b/shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java
index c2060fc..70a6857 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java
@@ -58,6 +58,11 @@ public class HdfsUtils {
 
   public static void setFullFileStatus(Configuration conf, HdfsUtils.HadoopFileStatus sourceStatus,
       FileSystem fs, Path target, boolean recursion) throws IOException {
+    setFullFileStatus(conf, sourceStatus, null, fs, target, recursion);
+  }
+
+  public static void setFullFileStatus(Configuration conf, HdfsUtils.HadoopFileStatus sourceStatus,
+    String targetGroup, FileSystem fs, Path target, boolean recursion) throws IOException {
     FileStatus fStatus= sourceStatus.getFileStatus();
     String group = fStatus.getGroup();
     boolean aclEnabled = Objects.equal(conf.get("dfs.namenode.acls.enabled"), "true");
@@ -111,7 +116,10 @@ public class HdfsUtils {
       }
     } else {
       if (group != null && !group.isEmpty()) {
-        fs.setOwner(target, null, group);
+        if (targetGroup == null ||
+            !group.equals(targetGroup)) {
+          fs.setOwner(target, null, group);
+        }
       }
       if (aclEnabled) {
         if (null != aclEntries) {
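
The new overload keeps the old entry point working (it simply passes null for targetGroup) and adds a single guard around setOwner. A stripped-down sketch of just that guard, using plain Hadoop APIs (everything except setOwner is illustrative):

import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Illustrative helper; only the guard mirrors the patch.
public class GroupInheritSketch {
  /**
   * Applies the inherited group to target, skipping the chgrp when the target
   * is already in that group. A null currentTargetGroup means "no hint
   * available" and falls back to the old unconditional behaviour.
   */
  static void inheritGroup(FileSystem fs, Path target,
      String groupToInherit, String currentTargetGroup) throws IOException {
    if (groupToInherit == null || groupToInherit.isEmpty()) {
      return; // nothing to inherit
    }
    if (currentTargetGroup == null || !groupToInherit.equals(currentTargetGroup)) {
      // setOwner with a null user changes only the group (chgrp); issuing it
      // when no change is needed is what can raise AccessControlException.
      fs.setOwner(target, null, groupToInherit);
    }
  }
}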