You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by ai...@apache.org on 2017/04/24 20:42:02 UTC
hive git commit: HIVE-16346: inheritPerms should be conditional based
on the target filesystem (Sahil Takiar, reviewed by Aihua Xu)
Repository: hive
Updated Branches:
refs/heads/branch-2 59faf3695 -> cce4d5e78
HIVE-16346: inheritPerms should be conditional based on the target filesystem (Sahil Takiar, reviewed by Aihua Xu)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/cce4d5e7
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/cce4d5e7
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/cce4d5e7
Branch: refs/heads/branch-2
Commit: cce4d5e78582c8744972d265147d39a345e082db
Parents: 59faf36
Author: Aihua Xu <ai...@apache.org>
Authored: Mon Apr 24 16:40:57 2017 -0400
Committer: Aihua Xu <ai...@apache.org>
Committed: Mon Apr 24 16:40:57 2017 -0400
----------------------------------------------------------------------
common/pom.xml | 8 ++-
.../hadoop/hive/common/BlobStorageUtils.java | 10 ++-
.../apache/hadoop/hive/common/FileUtils.java | 39 +++++++++--
.../apache/hadoop/hive/common/StorageUtils.java | 40 +++++++++++
.../hive/common/TestBlobStorageUtils.java | 8 +--
.../hadoop/hive/common/TestStorageUtils.java | 57 +++++++++++++++
.../apache/hadoop/hive/metastore/Warehouse.java | 5 +-
.../java/org/apache/hadoop/hive/ql/Context.java | 4 +-
.../apache/hadoop/hive/ql/exec/CopyTask.java | 3 +-
.../apache/hadoop/hive/ql/exec/MoveTask.java | 4 +-
.../hadoop/hive/ql/exec/ReplCopyTask.java | 3 +-
.../apache/hadoop/hive/ql/metadata/Hive.java | 29 ++++----
.../org/apache/hadoop/hive/io/HdfsUtils.java | 73 +++++++++++---------
.../apache/hadoop/hive/io/TestHdfsUtils.java | 19 +++++
14 files changed, 230 insertions(+), 72 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/cce4d5e7/common/pom.xml
----------------------------------------------------------------------
diff --git a/common/pom.xml b/common/pom.xml
index e1c15ee..84bb1e5 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -157,7 +157,13 @@
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
- </dependency>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs</artifactId>
+ <version>${hadoop.version}</version>
+ <optional>true</optional>
+ </dependency>
<!-- test inter-project -->
<dependency>
<groupId>com.google.code.tempus-fugit</groupId>
http://git-wip-us.apache.org/repos/asf/hive/blob/cce4d5e7/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java b/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java
index e6a17cb..b7f1359 100644
--- a/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java
@@ -15,6 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
package org.apache.hadoop.hive.common;
import org.apache.hadoop.conf.Configuration;
@@ -24,21 +25,23 @@ import org.apache.hadoop.hive.conf.HiveConf;
import java.util.Collection;
+
/**
* Utilities for different blob (object) storage systems
*/
public class BlobStorageUtils {
+
private static final boolean DISABLE_BLOBSTORAGE_AS_SCRATCHDIR = false;
public static boolean isBlobStoragePath(final Configuration conf, final Path path) {
return path != null && isBlobStorageScheme(conf, path.toUri().getScheme());
}
- public static boolean isBlobStorageFileSystem(final Configuration conf, final FileSystem fs) {
- return fs != null && isBlobStorageScheme(conf, fs.getScheme());
+ static boolean isBlobStorageFileSystem(final Configuration conf, final FileSystem fs) {
+ return fs != null && fs.getUri() != null && isBlobStorageScheme(conf, fs.getUri().getScheme());
}
- public static boolean isBlobStorageScheme(final Configuration conf, final String scheme) {
+ static boolean isBlobStorageScheme(final Configuration conf, final String scheme) {
Collection<String> supportedBlobStoreSchemes =
conf.getStringCollection(HiveConf.ConfVars.HIVE_BLOBSTORE_SUPPORTED_SCHEMES.varname);
@@ -61,4 +64,5 @@ public class BlobStorageUtils {
HiveConf.ConfVars.HIVE_BLOBSTORE_OPTIMIZATIONS_ENABLED.defaultBoolVal
);
}
+
}
http://git-wip-us.apache.org/repos/asf/hive/blob/cce4d5e7/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
index e586015..8ed8cc4 100644
--- a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
@@ -46,13 +46,11 @@ import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.Trash;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConfUtil;
import org.apache.hadoop.hive.io.HdfsUtils;
import org.apache.hadoop.hive.shims.HadoopShims;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.StringUtils;
import org.apache.hive.common.util.ShutdownHookManager;
import org.slf4j.Logger;
@@ -509,11 +507,29 @@ public final class FileUtils {
/**
* Creates the directory and all necessary parent directories.
+ *
+ * @param fs FileSystem to use
+ * @param f path to create.
+ * @param conf Hive configuration
+ *
+ * @return true if directory created successfully. False otherwise, including if it exists.
+ *
+ * @throws IOException exception in creating the directory
+ */
+ public static boolean mkdir(FileSystem fs, Path f, Configuration conf) throws IOException {
+ return mkdir(fs, f, shouldInheritPerms(conf, fs), conf);
+ }
+
+ /**
+ * Creates the directory and all necessary parent directories.
+ *
* @param fs FileSystem to use
* @param f path to create.
* @param inheritPerms whether directory inherits the permission of the last-existing parent path
* @param conf Hive configuration
+ *
* @return true if directory created successfully. False otherwise, including if it exists.
+ *
* @throws IOException exception in creating the directory
*/
public static boolean mkdir(FileSystem fs, Path f, boolean inheritPerms, Configuration conf) throws IOException {
@@ -601,9 +617,10 @@ public final class FileUtils {
copied = FileUtil.copy(srcFS, src, dstFS, dst, deleteSource, overwrite, conf);
}
- boolean inheritPerms = conf.getBoolVar(HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS);
+ boolean inheritPerms = shouldInheritPerms(conf, dstFS);
if (copied && inheritPerms) {
- HdfsUtils.setFullFileStatus(conf, new HdfsUtils.HadoopFileStatus(conf, dstFS, dst.getParent()), dstFS, dst, true);
+ inheritPerms(conf, new HdfsUtils.HadoopFileStatus(conf, dstFS, dst.getParent()), dstFS, dst,
+ true);
}
return copied;
}
@@ -994,4 +1011,18 @@ public final class FileUtils {
return result;
}
+ public static boolean shouldInheritPerms(Configuration conf, FileSystem fs) {
+ return HiveConf.getBoolVar(conf,
+ HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS) && StorageUtils.shouldSetPerms(conf, fs);
+ }
+
+ public static void inheritPerms(Configuration conf, HdfsUtils.HadoopFileStatus sourceStatus, FileSystem fs, Path target,
+ boolean recursive) {
+ inheritPerms(conf, sourceStatus, null, fs, target, recursive);
+ }
+
+ public static void inheritPerms(Configuration conf, HdfsUtils.HadoopFileStatus sourceStatus, String targetGroup,
+ FileSystem fs, Path target, boolean recursive) {
+ HdfsUtils.setFullFileStatus(conf, sourceStatus, targetGroup, fs, target, recursive);
+ }
}
http://git-wip-us.apache.org/repos/asf/hive/blob/cce4d5e7/common/src/java/org/apache/hadoop/hive/common/StorageUtils.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/StorageUtils.java b/common/src/java/org/apache/hadoop/hive/common/StorageUtils.java
new file mode 100644
index 0000000..900e2b1
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/common/StorageUtils.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+
+
+public class StorageUtils {
+
+ /**
+ * Returns true if permissions should be set on the given filesystem, false otherwise. Certain implementations of
+ * {@link FileSystem} don't have a concept of permissions, such as S3. This method checks to determine if the given
+ * {@link FileSystem} falls into that category.
+ *
+ * @param conf the {@link Configuration} to use when checking if permissions should be set on the {@link FileSystem}
+ * @param fs the {@link FileSystem} to check to see if permission should be set or not
+ *
+ * @return true if permissions should be set on the given {@link FileSystem}, false otherwise
+ */
+ public static boolean shouldSetPerms(Configuration conf, FileSystem fs) {
+ return !(BlobStorageUtils.areOptimizationsEnabled(conf) && BlobStorageUtils.isBlobStorageFileSystem(conf, fs));
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/cce4d5e7/common/src/test/org/apache/hadoop/hive/common/TestBlobStorageUtils.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/TestBlobStorageUtils.java b/common/src/test/org/apache/hadoop/hive/common/TestBlobStorageUtils.java
index 84a0d86..918ec95 100644
--- a/common/src/test/org/apache/hadoop/hive/common/TestBlobStorageUtils.java
+++ b/common/src/test/org/apache/hadoop/hive/common/TestBlobStorageUtils.java
@@ -64,18 +64,18 @@ public class TestBlobStorageUtils {
/* Valid FileSystem schemes */
- doReturn("s3a").when(fs).getScheme();
+ doReturn(URI.create("s3a:///")).when(fs).getUri();
assertTrue(isBlobStorageFileSystem(conf, fs));
- doReturn("swift").when(fs).getScheme();
+ doReturn(URI.create("swift:///")).when(fs).getUri();
assertTrue(isBlobStorageFileSystem(conf, fs));
/* Invalid FileSystem schemes */
- doReturn("hdfs").when(fs).getScheme();
+ doReturn(URI.create("hdfs:///")).when(fs).getUri();
assertFalse(isBlobStorageFileSystem(conf, fs));
- doReturn("").when(fs).getScheme();
+ doReturn(URI.create("")).when(fs).getUri();
assertFalse(isBlobStorageFileSystem(conf, fs));
assertFalse(isBlobStorageFileSystem(conf, null));
http://git-wip-us.apache.org/repos/asf/hive/blob/cce4d5e7/common/src/test/org/apache/hadoop/hive/common/TestStorageUtils.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/TestStorageUtils.java b/common/src/test/org/apache/hadoop/hive/common/TestStorageUtils.java
new file mode 100644
index 0000000..638f48c
--- /dev/null
+++ b/common/src/test/org/apache/hadoop/hive/common/TestStorageUtils.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common;
+
+import java.net.URI;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertFalse;
+
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+
+public class TestStorageUtils {
+
+ private final HiveConf conf = new HiveConf();
+
+ @Before
+ public void setUp() {
+ conf.set(HiveConf.ConfVars.HIVE_BLOBSTORE_SUPPORTED_SCHEMES.varname, HiveConf.ConfVars.HIVE_BLOBSTORE_SUPPORTED_SCHEMES.getDefaultValue());
+ }
+
+ @Test
+ public void testShouldInheritPerms() {
+ conf.set(HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS.varname, "true");
+ FileSystem fs = mock(FileSystem.class);
+ for (String blobScheme : conf.getStringCollection(HiveConf.ConfVars.HIVE_BLOBSTORE_SUPPORTED_SCHEMES.varname)) {
+ when(fs.getUri()).thenReturn(URI.create(blobScheme + ":///"));
+ assertFalse(FileUtils.shouldInheritPerms(conf, fs));
+ }
+
+ when(fs.getUri()).thenReturn(URI.create("hdfs:///"));
+ assertTrue(FileUtils.shouldInheritPerms(conf, fs));
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/cce4d5e7/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java b/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
index 27283d8..c94ef07 100755
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
@@ -33,8 +33,10 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
+
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileStatus;
@@ -186,11 +188,10 @@ public class Warehouse {
}
public boolean mkdirs(Path f, boolean inheritPermCandidate) throws MetaException {
- boolean inheritPerms = HiveConf.getBoolVar(conf,
- HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS) && inheritPermCandidate;
FileSystem fs = null;
try {
fs = getFs(f);
+ boolean inheritPerms = FileUtils.shouldInheritPerms(conf, fs) && inheritPermCandidate;
return FileUtils.mkdir(fs, f, inheritPerms, conf);
} catch (IOException e) {
MetaStoreUtils.logAndThrowMetaException(e);
http://git-wip-us.apache.org/repos/asf/hive/blob/cce4d5e7/ql/src/java/org/apache/hadoop/hive/ql/Context.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Context.java b/ql/src/java/org/apache/hadoop/hive/ql/Context.java
index d1d2789..08bba3d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Context.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Context.java
@@ -355,9 +355,7 @@ public class Context {
if (mkdir) {
try {
- boolean inheritPerms = HiveConf.getBoolVar(conf,
- HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS);
- if (!FileUtils.mkdir(fs, dir, inheritPerms, conf)) {
+ if (!FileUtils.mkdir(fs, dir, conf)) {
throw new IllegalStateException("Cannot create staging directory '" + dir.toString() + "'");
}
http://git-wip-us.apache.org/repos/asf/hive/blob/cce4d5e7/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
index cbe0aca..2683f29 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
@@ -70,8 +70,7 @@ public class CopyTask extends Task<CopyWork> implements Serializable {
}
}
- boolean inheritPerms = conf.getBoolVar(HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS);
- if (!FileUtils.mkdir(dstFs, toPath, inheritPerms, conf)) {
+ if (!FileUtils.mkdir(dstFs, toPath, conf)) {
console.printError("Cannot make target directory: " + toPath.toString());
return 2;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/cce4d5e7/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
index 1802f37..5cf2c2b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
@@ -181,8 +181,8 @@ public class MoveTask extends Task<MoveWork> implements Serializable {
actualPath = actualPath.getParent();
}
fs.mkdirs(mkDirPath);
- if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS)) {
- HdfsUtils.setFullFileStatus(conf, new HdfsUtils.HadoopFileStatus(conf, fs, actualPath), fs, mkDirPath, true);
+ if (FileUtils.shouldInheritPerms(conf, fs)) {
+ FileUtils.inheritPerms(conf, new HdfsUtils.HadoopFileStatus(conf, fs, actualPath), fs, mkDirPath, true);
}
}
return deletePath;
http://git-wip-us.apache.org/repos/asf/hive/blob/cce4d5e7/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
index 4686e2c..d2f9e79 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
@@ -106,8 +106,7 @@ public class ReplCopyTask extends Task<ReplCopyWork> implements Serializable {
srcFiles.addAll(Arrays.asList(srcs));
LOG.debug("ReplCopyTask numFiles:" + (srcFiles == null ? "null" : srcFiles.size()));
- boolean inheritPerms = conf.getBoolVar(HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS);
- if (!FileUtils.mkdir(dstFs, toPath, inheritPerms, conf)) {
+ if (!FileUtils.mkdir(dstFs, toPath, conf)) {
console.printError("Cannot make target directory: " + toPath.toString());
return 2;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/cce4d5e7/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index f64cfda..f11feb7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -2889,8 +2889,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
if (!fullDestStatus.getFileStatus().isDirectory()) {
throw new HiveException(destf + " is not a directory.");
}
- final boolean inheritPerms = HiveConf.getBoolVar(conf,
- HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS);
+ final boolean inheritPerms = FileUtils.shouldInheritPerms(conf, destFs);
final List<Future<ObjectPair<Path, Path>>> futures = new LinkedList<>();
final ExecutorService pool = conf.getInt(ConfVars.HIVE_MOVE_FILES_THREAD_COUNT.varname, 25) > 0 ?
Executors.newFixedThreadPool(conf.getInt(ConfVars.HIVE_MOVE_FILES_THREAD_COUNT.varname, 25),
@@ -2939,8 +2938,9 @@ private void constructOneLBLocationMap(FileStatus fSta,
Path destPath = mvFile(conf, srcFs, srcP, destFs, destf, isSrcLocal, isRenameAllowed);
if (inheritPerms) {
- HdfsUtils.setFullFileStatus(conf, fullDestStatus, srcGroup, destFs, destPath, false);
+ FileUtils.inheritPerms(conf, fullDestStatus, srcGroup, destFs, destPath, false);
}
+
if (null != newFiles) {
newFiles.add(destPath);
}
@@ -2952,7 +2952,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
}
if (null == pool) {
if (inheritPerms) {
- HdfsUtils.setFullFileStatus(conf, fullDestStatus, null, destFs, destf, true);
+ FileUtils.inheritPerms(conf, fullDestStatus, destFs, destf, true);
}
} else {
pool.shutdown();
@@ -3105,8 +3105,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
}
//needed for perm inheritance.
- final boolean inheritPerms = HiveConf.getBoolVar(conf,
- HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS);
+ final boolean inheritPerms = FileUtils.shouldInheritPerms(conf, destFs);
HdfsUtils.HadoopFileStatus destStatus = null;
// If source path is a subdirectory of the destination path:
@@ -3118,7 +3117,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
boolean destIsSubDir = isSubDir(srcf, destf, srcFs, destFs, isSrcLocal);
try {
if (inheritPerms || replace) {
- try{
+ try {
destStatus = new HdfsUtils.HadoopFileStatus(conf, destFs, destf);
//if destf is an existing directory:
//if replace is true, delete followed by rename(mv) is equivalent to replace
@@ -3142,7 +3141,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
// For local src file, copy to hdfs
destFs.copyFromLocalFile(srcf, destf);
if (inheritPerms) {
- HdfsUtils.setFullFileStatus(conf, destStatus, destFs, destf, true);
+ FileUtils.inheritPerms(conf, destStatus, destFs, destf, true);
}
return true;
} else {
@@ -3178,7 +3177,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
final String group = srcStatus.getGroup();
if(destFs.rename(srcStatus.getPath(), destFile)) {
if (inheritPerms) {
- HdfsUtils.setFullFileStatus(conf, desiredStatus, group, destFs, destFile, false);
+ FileUtils.inheritPerms(conf, desiredStatus, group, destFs, destFile, false);
}
} else {
throw new IOException("rename for src path: " + srcStatus.getPath() + " to dest path:"
@@ -3191,7 +3190,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
}
if (null == pool) {
if (inheritPerms) {
- HdfsUtils.setFullFileStatus(conf, desiredStatus, null, destFs, destf, true);
+ FileUtils.inheritPerms(conf, desiredStatus, destFs, destf, true);
}
} else {
pool.shutdown();
@@ -3209,7 +3208,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
} else {
if (destFs.rename(srcf, destf)) {
if (inheritPerms) {
- HdfsUtils.setFullFileStatus(conf, destStatus, destFs, destf, true);
+ FileUtils.inheritPerms(conf, destStatus, destFs, destf, true);
}
return true;
}
@@ -3261,12 +3260,10 @@ private void constructOneLBLocationMap(FileStatus fSta,
*/
static protected void copyFiles(HiveConf conf, Path srcf, Path destf,
FileSystem fs, boolean isSrcLocal, boolean isAcid, List<Path> newFiles) throws HiveException {
- boolean inheritPerms = HiveConf.getBoolVar(conf,
- HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS);
try {
// create the destination if it does not exist
if (!fs.exists(destf)) {
- FileUtils.mkdir(fs, destf, inheritPerms, conf);
+ FileUtils.mkdir(fs, destf, conf);
}
} catch (IOException e) {
throw new HiveException(
@@ -3460,9 +3457,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
// first call FileUtils.mkdir to make sure that destf directory exists, if not, it creates
// destf with inherited permissions
- boolean inheritPerms = HiveConf.getBoolVar(conf, HiveConf.ConfVars
- .HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS);
- boolean destfExist = FileUtils.mkdir(destFs, destf, inheritPerms, conf);
+ boolean destfExist = FileUtils.mkdir(destFs, destf, conf);
if(!destfExist) {
throw new IOException("Directory " + destf.toString()
+ " does not exist and could not be created.");
http://git-wip-us.apache.org/repos/asf/hive/blob/cce4d5e7/shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java b/shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java
index 277738f..1b57184 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hive.common.StorageUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -49,6 +50,7 @@ import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
public class HdfsUtils {
+
private static final Logger LOG = LoggerFactory.getLogger("shims.HdfsUtils");
// TODO: this relies on HDFS not changing the format; we assume if we could get inode ID, this
@@ -74,7 +76,9 @@ public class HdfsUtils {
*/
public static void setFullFileStatus(Configuration conf, HdfsUtils.HadoopFileStatus sourceStatus,
FileSystem fs, Path target, boolean recursion) {
- setFullFileStatus(conf, sourceStatus, null, fs, target, recursion);
+ if (StorageUtils.shouldSetPerms(conf, fs)) {
+ setFullFileStatus(conf, sourceStatus, null, fs, target, recursion);
+ }
}
/**
@@ -91,7 +95,9 @@ public class HdfsUtils {
*/
public static void setFullFileStatus(Configuration conf, HdfsUtils.HadoopFileStatus sourceStatus,
String targetGroup, FileSystem fs, Path target, boolean recursion) {
- setFullFileStatus(conf, sourceStatus, targetGroup, fs, target, recursion, recursion ? new FsShell() : null);
+ if (StorageUtils.shouldSetPerms(conf, fs)) {
+ setFullFileStatus(conf, sourceStatus, targetGroup, fs, target, recursion, recursion ? new FsShell() : null);
+ }
}
@VisibleForTesting
@@ -180,6 +186,7 @@ public class HdfsUtils {
return new AclEntry.Builder().setScope(scope).setType(type)
.setPermission(permission).build();
}
+
/**
* Removes basic permission acls (unnamed acls) from the list of acl entries
* @param entries acl entries to remove from.
@@ -201,39 +208,41 @@ public class HdfsUtils {
int retval = shell.run(command);
LOG.debug("Return value is :" + retval);
}
-public static class HadoopFileStatus {
-
- private final FileStatus fileStatus;
- private final AclStatus aclStatus;
-
- public HadoopFileStatus(Configuration conf, FileSystem fs, Path file) throws IOException {
-
- FileStatus fileStatus = fs.getFileStatus(file);
- AclStatus aclStatus = null;
- if (Objects.equal(conf.get("dfs.namenode.acls.enabled"), "true")) {
- //Attempt extended Acl operations only if its enabled, but don't fail the operation regardless.
- try {
- aclStatus = fs.getAclStatus(file);
- } catch (Exception e) {
- LOG.info("Skipping ACL inheritance: File system for path " + file + " " +
- "does not support ACLs but dfs.namenode.acls.enabled is set to true. ");
- LOG.debug("The details are: " + e, e);
+
+ public static class HadoopFileStatus {
+
+ private final FileStatus fileStatus;
+ private final AclStatus aclStatus;
+
+ public HadoopFileStatus(Configuration conf, FileSystem fs, Path file) throws IOException {
+
+ FileStatus fileStatus = fs.getFileStatus(file);
+ AclStatus aclStatus = null;
+ if (Objects.equal(conf.get("dfs.namenode.acls.enabled"), "true")) {
+ //Attempt extended Acl operations only if it's enabled, but don't fail the operation regardless.
+ try {
+ aclStatus = fs.getAclStatus(file);
+ } catch (Exception e) {
+ LOG.info("Skipping ACL inheritance: File system for path " + file + " " +
+ "does not support ACLs but dfs.namenode.acls.enabled is set to true. ");
+ LOG.debug("The details are: " + e, e);
+ }
}
- }this.fileStatus = fileStatus;
- this.aclStatus = aclStatus;
- }
+ this.fileStatus = fileStatus;
+ this.aclStatus = aclStatus;
+ }
- public FileStatus getFileStatus() {
- return fileStatus;
- }
+ public FileStatus getFileStatus() {
+ return fileStatus;
+ }
- public List<AclEntry> getAclEntries() {
- return aclStatus == null ? null : Collections.unmodifiableList(aclStatus.getEntries());
- }
+ public List<AclEntry> getAclEntries() {
+ return aclStatus == null ? null : Collections.unmodifiableList(aclStatus.getEntries());
+ }
- @VisibleForTesting
- AclStatus getAclStatus() {
- return this.aclStatus;
+ @VisibleForTesting
+ AclStatus getAclStatus() {
+ return this.aclStatus;
+ }
}
}
-}
http://git-wip-us.apache.org/repos/asf/hive/blob/cce4d5e7/shims/common/src/main/test/org/apache/hadoop/hive/io/TestHdfsUtils.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/test/org/apache/hadoop/hive/io/TestHdfsUtils.java b/shims/common/src/main/test/org/apache/hadoop/hive/io/TestHdfsUtils.java
index 86a132c..79507e4 100644
--- a/shims/common/src/main/test/org/apache/hadoop/hive/io/TestHdfsUtils.java
+++ b/shims/common/src/main/test/org/apache/hadoop/hive/io/TestHdfsUtils.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.hive.io;
import java.io.IOException;
+import java.net.URI;
import java.util.ArrayList;
import java.util.List;
@@ -31,11 +32,14 @@ import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.junit.Test;
import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyList;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@@ -181,4 +185,19 @@ public class TestHdfsUtils {
true, mockFsShell);
verify(mockFsShell).run(new String[]{"-chmod", "-R", any(String.class), fakeTarget.toString()});
}
+
+ @Test
+ public void testSkipSetFullFileStatusIfBlobStore() throws IOException {
+ Configuration conf = new Configuration();
+ conf.set(HiveConf.ConfVars.HIVE_BLOBSTORE_SUPPORTED_SCHEMES.varname, "s3a");
+ FileSystem fs = mock(FileSystem.class);
+ when(fs.getUri()).thenReturn(URI.create("s3a:///"));
+ HdfsUtils.setFullFileStatus(conf, null, null, fs, null, false);
+
+ verify(fs, never()).getFileStatus(any(Path.class));
+ verify(fs, never()).listStatus(any(Path[].class));
+ verify(fs, never()).setPermission(any(Path.class), any(FsPermission.class));
+ verify(fs, never()).setAcl(any(Path.class), anyList());
+ verify(fs, never()).setOwner(any(Path.class), any(String.class), any(String.class));
+ }
}