You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by ae...@apache.org on 2016/06/19 00:33:10 UTC
[31/39] hadoop git commit: HADOOP-12943. Add -w -r options in dfs -test command. Contributed by Weiwei Yang.
HADOOP-12943. Add -w -r options in dfs -test command. Contributed by Weiwei Yang.
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/09e82aca
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/09e82aca
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/09e82aca
Branch: refs/heads/HDFS-1312
Commit: 09e82acaf9a6d7663bc51bbca0cdeca4b582b535
Parents: 51d16e7
Author: Akira Ajisaka <aa...@apache.org>
Authored: Fri Jun 17 16:16:44 2016 +0900
Committer: Akira Ajisaka <aa...@apache.org>
Committed: Fri Jun 17 16:20:38 2016 +0900
----------------------------------------------------------------------
.../java/org/apache/hadoop/fs/shell/Test.java | 80 +++++++++++++-------
.../src/site/markdown/FileSystemShell.md | 3 +
.../org/apache/hadoop/hdfs/TestDFSShell.java | 71 ++++++++++++++++-
3 files changed, 126 insertions(+), 28 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/09e82aca/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
index 9984cf2..a2d2529 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
@@ -18,11 +18,14 @@
package org.apache.hadoop.fs.shell;
+import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.LinkedList;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.security.AccessControlException;
/**
* Perform shell-like file tests
@@ -38,18 +41,25 @@ class Test extends FsCommand {
public static final String NAME = "test";
public static final String USAGE = "-[defsz] <path>";
public static final String DESCRIPTION =
- "Answer various questions about <path>, with result via exit status.\n" +
- " -d return 0 if <path> is a directory.\n" +
- " -e return 0 if <path> exists.\n" +
- " -f return 0 if <path> is a file.\n" +
- " -s return 0 if file <path> is greater than zero bytes in size.\n" +
- " -z return 0 if file <path> is zero bytes in size, else return 1.";
+ "Answer various questions about <path>, with result via exit status.\n"
+ + " -d return 0 if <path> is a directory.\n"
+ + " -e return 0 if <path> exists.\n"
+ + " -f return 0 if <path> is a file.\n"
+ + " -s return 0 if file <path> is greater "
+ + " than zero bytes in size.\n"
+ + " -w return 0 if file <path> exists "
+ + " and write permission is granted.\n"
+ + " -r return 0 if file <path> exists "
+ + " and read permission is granted.\n"
+ + " -z return 0 if file <path> is "
+ + " zero bytes in size, else return 1.";
private char flag;
@Override
protected void processOptions(LinkedList<String> args) {
- CommandFormat cf = new CommandFormat(1, 1, "e", "d", "f", "s", "z");
+ CommandFormat cf = new CommandFormat(1, 1,
+ "e", "d", "f", "s", "z", "w", "r");
cf.parse(args);
String[] opts = cf.getOpts().toArray(new String[0]);
@@ -68,29 +78,47 @@ class Test extends FsCommand {
protected void processPath(PathData item) throws IOException {
boolean test = false;
switch (flag) {
- case 'e':
- test = true;
- break;
- case 'd':
- test = item.stat.isDirectory();
- break;
- case 'f':
- test = item.stat.isFile();
- break;
- case 's':
- test = (item.stat.getLen() > 0);
- break;
- case 'z':
- test = (item.stat.getLen() == 0);
- break;
- default:
- break;
+ case 'e':
+ test = true;
+ break;
+ case 'd':
+ test = item.stat.isDirectory();
+ break;
+ case 'f':
+ test = item.stat.isFile();
+ break;
+ case 's':
+ test = (item.stat.getLen() > 0);
+ break;
+ case 'z':
+ test = (item.stat.getLen() == 0);
+ break;
+ case 'w':
+ test = testAccess(item, FsAction.WRITE);
+ break;
+ case 'r':
+ test = testAccess(item, FsAction.READ);
+ break;
+ default:
+ break;
+ }
+ if (!test) {
+ exitCode = 1;
+ }
+ }
+
+ private boolean testAccess(PathData item, FsAction action)
+ throws IOException {
+ try {
+ item.fs.access(item.path, action);
+ return true;
+ } catch (AccessControlException | FileNotFoundException e) {
+ return false;
}
- if (!test) exitCode = 1;
}
@Override
protected void processNonexistentPath(PathData item) throws IOException {
exitCode = 1;
}
-}
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hadoop/blob/09e82aca/hadoop-common-project/hadoop-common/src/site/markdown/FileSystemShell.md
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/FileSystemShell.md b/hadoop-common-project/hadoop-common/src/site/markdown/FileSystemShell.md
index 1723426..066cfe3 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/FileSystemShell.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/FileSystemShell.md
@@ -669,8 +669,11 @@ Options:
* -e: if the path exists, return 0.
* -f: if the path is a file, return 0.
* -s: if the path is not empty, return 0.
+* -r: if the path exists and read permission is granted, return 0.
+* -w: if the path exists and write permission is granted, return 0.
* -z: if the file is zero length, return 0.
+
Example:
* `hadoop fs -test -e filename`
http://git-wip-us.apache.org/repos/asf/hadoop/blob/09e82aca/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
index 24d8b90..9cae762 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
@@ -1179,8 +1179,8 @@ public class TestDFSShell {
* Tests various options of DFSShell.
*/
@Test (timeout = 120000)
- public void testDFSShell() throws IOException {
- Configuration conf = new HdfsConfiguration();
+ public void testDFSShell() throws Exception {
+ final Configuration conf = new HdfsConfiguration();
/* This tests some properties of ChecksumFileSystem as well.
* Make sure that we create ChecksumDFS */
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
@@ -1532,6 +1532,73 @@ public class TestDFSShell {
assertEquals(0, val);
}
+ // Verify -test -w/-r
+ {
+ Path permDir = new Path("/test/permDir");
+ Path permFile = new Path("/test/permDir/permFile");
+ mkdir(fs, permDir);
+ writeFile(fs, permFile);
+
+ // Verify -test -w positive case (dir exists and can write)
+ final String[] wargs = new String[3];
+ wargs[0] = "-test";
+ wargs[1] = "-w";
+ wargs[2] = permDir.toString();
+ int val = -1;
+ try {
+ val = shell.run(wargs);
+ } catch (Exception e) {
+ System.err.println("Exception raised from DFSShell.run " +
+ e.getLocalizedMessage());
+ }
+ assertEquals(0, val);
+
+ // Verify -test -r positive case (file exists and can read)
+ final String[] rargs = new String[3];
+ rargs[0] = "-test";
+ rargs[1] = "-r";
+ rargs[2] = permFile.toString();
+ try {
+ val = shell.run(rargs);
+ } catch (Exception e) {
+ System.err.println("Exception raised from DFSShell.run " +
+ e.getLocalizedMessage());
+ }
+ assertEquals(0, val);
+
+ // Verify -test -r negative case (file exists but cannot read)
+ runCmd(shell, "-chmod", "600", permFile.toString());
+
+ UserGroupInformation smokeUser =
+ UserGroupInformation.createUserForTesting("smokeUser",
+ new String[] {"hadoop"});
+ smokeUser.doAs(new PrivilegedExceptionAction<String>() {
+ @Override
+ public String run() throws Exception {
+ FsShell shell = new FsShell(conf);
+ int exitCode = shell.run(rargs);
+ assertEquals(1, exitCode);
+ return null;
+ }
+ });
+
+ // Verify -test -w negative case (dir exists but cannot write)
+ runCmd(shell, "-chown", "-R", "not_allowed", permDir.toString());
+ runCmd(shell, "-chmod", "-R", "700", permDir.toString());
+
+ smokeUser.doAs(new PrivilegedExceptionAction<String>() {
+ @Override
+ public String run() throws Exception {
+ FsShell shell = new FsShell(conf);
+ int exitCode = shell.run(wargs);
+ assertEquals(1, exitCode);
+ return null;
+ }
+ });
+
+ // cleanup
+ fs.delete(permDir, true);
+ }
} finally {
try {
fileSys.close();
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org