You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by wh...@apache.org on 2014/02/13 06:51:29 UTC
svn commit: r1567850 - in
/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src:
main/java/org/apache/hadoop/fs/shell/Ls.java
test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
Author: wheat9
Date: Thu Feb 13 05:51:28 2014
New Revision: 1567850
URL: http://svn.apache.org/r1567850
Log:
HDFS-5932. Ls should display the ACL bit. Contributed by Chris Nauroth.
Modified:
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java
hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java?rev=1567850&r1=1567849&r2=1567850&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java Thu Feb 13 05:51:28 2014
@@ -19,15 +19,22 @@
package org.apache.hadoop.fs.shell;
import java.io.IOException;
+import java.net.URI;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.LinkedList;
+import java.util.Set;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.ipc.RpcNoSuchMethodException;
+
+import com.google.common.collect.Sets;
/**
* Get a listing of all files that match the file patterns.
@@ -66,6 +73,8 @@ class Ls extends FsCommand {
protected boolean dirRecurse;
protected boolean humanReadable = false;
+ private Set<URI> aclNotSupportedFsSet = Sets.newHashSet();
+
protected String formatSize(long size) {
return humanReadable
? StringUtils.TraditionalBinaryPrefix.long2String(size, "", 1)
@@ -108,7 +117,7 @@ class Ls extends FsCommand {
FileStatus stat = item.stat;
String line = String.format(lineFormat,
(stat.isDirectory() ? "d" : "-"),
- stat.getPermission(),
+ stat.getPermission() + (hasAcl(item) ? "+" : ""),
(stat.isFile() ? stat.getReplication() : "-"),
stat.getOwner(),
stat.getGroup(),
@@ -146,6 +155,49 @@ class Ls extends FsCommand {
lineFormat = fmt.toString();
}
+ /**
+ * Calls getAclStatus to determine if the given item has an ACL. For
+ * compatibility, this method traps errors caused by the RPC method missing
+ * from the server side. This would happen if the client was connected to an
+ * old NameNode that didn't have the ACL APIs. This method also traps the
+ * case of the client-side FileSystem not implementing the ACL APIs.
+ * FileSystem instances that do not support ACLs are remembered. This
+ * prevents the client from sending multiple failing RPC calls during a
+ * recursive ls. Unsupported file systems are tracked by their URI in
+ * aclNotSupportedFsSet, so the check is skipped on later iterations.
+ *
+ * @param item PathData item to check
+ * @return boolean true if item has an ACL
+ * @throws IOException if there is a failure
+ */
+ private boolean hasAcl(PathData item) throws IOException {
+ FileSystem fs = item.fs;
+ if (aclNotSupportedFsSet.contains(fs.getUri())) {
+ // This FileSystem failed to run the ACL API in an earlier iteration.
+ return false;
+ }
+ try {
+ // A non-empty ACL entry list means the item has an extended ACL.
+ return !fs.getAclStatus(item.path).getEntries().isEmpty();
+ } catch (RemoteException e) {
+ // If this is a RpcNoSuchMethodException, then the client is connected to
+ // an older NameNode that doesn't support ACLs. Keep going.
+ IOException e2 = e.unwrapRemoteException(RpcNoSuchMethodException.class);
+ if (!(e2 instanceof RpcNoSuchMethodException)) {
+ throw e;
+ }
+ } catch (IOException e) {
+ // The NameNode supports ACLs, but they are not enabled. Keep going.
+ // NOTE(review): this is a string match against the server's error
+ // message text, so it is fragile if that message ever changes.
+ String message = e.getMessage();
+ if (message != null && !message.contains("ACLs has been disabled")) {
+ throw e;
+ }
+ } catch (UnsupportedOperationException e) {
+ // The underlying FileSystem doesn't implement ACLs. Keep going.
+ }
+ // Remember that this FileSystem cannot support ACLs.
+ aclNotSupportedFsSet.add(fs.getUri());
+ return false;
+ }
+
// Returns the larger of n and the string length of value; a null value
// contributes length 0. Presumably used to size columns for the ls line
// format built above — confirm against buildFormat callers.
private int maxLength(int n, Object value) {
return Math.max(n, (value != null) ? String.valueOf(value).length() : 0);
}
Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java?rev=1567850&r1=1567849&r2=1567850&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java Thu Feb 13 05:51:28 2014
@@ -20,15 +20,27 @@ package org.apache.hadoop.fs.shell;
import static org.junit.Assert.*;
import java.io.IOException;
+import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclEntryScope;
import org.apache.hadoop.fs.permission.AclEntryType;
+import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.ipc.RpcNoSuchMethodException;
+import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.ToolRunner;
import org.junit.Before;
import org.junit.Test;
@@ -128,6 +140,97 @@ public class TestAclCommands {
assertEquals("Parsed Acl not correct", expectedList, parsedList);
}
+ /**
+ * ls must exit 0 even when the server side lacks the getAclStatus RPC.
+ * StubFileSystem simulates the missing RPC by throwing a RemoteException
+ * wrapping RpcNoSuchMethodException when stubfs.noRpcForGetAclStatus is
+ * set to true in the configuration.
+ */
+ @Test
+ public void testLsNoRpcForGetAclStatus() throws Exception {
+ Configuration conf = new Configuration();
+ conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, "stubfs:///");
+ conf.setClass("fs.stubfs.impl", StubFileSystem.class, FileSystem.class);
+ conf.setBoolean("stubfs.noRpcForGetAclStatus", true);
+ assertEquals("ls must succeed even if getAclStatus RPC does not exist.",
+ 0, ToolRunner.run(conf, new FsShell(), new String[] { "-ls", "/" }));
+ }
+
+ /**
+ * ls must exit 0 when the client-side FileSystem does not implement the
+ * ACL APIs at all. Without stubfs.noRpcForGetAclStatus set,
+ * StubFileSystem.getAclStatus delegates to the base FileSystem class,
+ * which is expected to reject the call — the case Ls.hasAcl traps as
+ * UnsupportedOperationException.
+ */
+ @Test
+ public void testLsAclsUnsupported() throws Exception {
+ Configuration conf = new Configuration();
+ conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, "stubfs:///");
+ conf.setClass("fs.stubfs.impl", StubFileSystem.class, FileSystem.class);
+ assertEquals("ls must succeed even if FileSystem does not implement ACLs.",
+ 0, ToolRunner.run(conf, new FsShell(), new String[] { "-ls", "/" }));
+ }
+
+ /**
+ * Minimal FileSystem stub used to drive FsShell ls in the tests above.
+ * listStatus returns a single fixed entry, and getAclStatus either throws
+ * a RemoteException wrapping RpcNoSuchMethodException (when
+ * stubfs.noRpcForGetAclStatus is true) or delegates to the base class.
+ * All other operations are stubbed to return null/false.
+ */
+ public static class StubFileSystem extends FileSystem {
+
+ // NOTE(review): missing @Override annotation — confirm the signature
+ // matches FileSystem.append.
+ public FSDataOutputStream append(Path f, int bufferSize,
+ Progressable progress) throws IOException {
+ return null;
+ }
+
+ // NOTE(review): missing @Override annotation — confirm the signature
+ // matches FileSystem.create.
+ public FSDataOutputStream create(Path f, FsPermission permission,
+ boolean overwrite, int bufferSize, short replication, long blockSize,
+ Progressable progress) throws IOException {
+ return null;
+ }
+
+ @Override
+ public boolean delete(Path f, boolean recursive) throws IOException {
+ return false;
+ }
+
+ // Simulates a NameNode without the getAclStatus RPC when configured;
+ // otherwise defers to the base class behavior.
+ public AclStatus getAclStatus(Path path) throws IOException {
+ if (getConf().getBoolean("stubfs.noRpcForGetAclStatus", false)) {
+ throw new RemoteException(RpcNoSuchMethodException.class.getName(),
+ "test exception")
+ }
+ return super.getAclStatus(path);
+ }
+
+ @Override
+ public FileStatus getFileStatus(Path f) throws IOException {
+ return null;
+ }
+
+ @Override
+ public URI getUri() {
+ return URI.create("stubfs:///");
+ }
+
+ @Override
+ public Path getWorkingDirectory() {
+ return null;
+ }
+
+ // Always lists exactly one directory entry, /foo, with rwxr-xr-x
+ // permissions, so ls has a deterministic row to format.
+ @Override
+ public FileStatus[] listStatus(Path f) throws IOException {
+ FsPermission perm = new FsPermission(FsAction.ALL, FsAction.READ_EXECUTE,
+ FsAction.READ_EXECUTE);
+ Path path = new Path("/foo");
+ FileStatus stat = new FileStatus(1000, true, 3, 1000, 0, 0, perm, "owner",
+ "group", path);
+ return new FileStatus[] { stat };
+ }
+
+ @Override
+ public boolean mkdirs(Path f, FsPermission permission)
+ throws IOException {
+ return false;
+ }
+
+ @Override
+ public FSDataInputStream open(Path f, int bufferSize) throws IOException {
+ return null;
+ }
+
+ @Override
+ public boolean rename(Path src, Path dst) throws IOException {
+ return false;
+ }
+
+ @Override
+ public void setWorkingDirectory(Path dir) {
+ }
+ }
+
// Helper: runs the given FsShell command line against the test
// Configuration and returns the shell's exit code.
private int runCommand(String[] commands) throws Exception {
return ToolRunner.run(conf, new FsShell(), commands);
}