You are viewing a plain-text version of this content; the canonical (HTML) version is available from the mailing-list archive.
Posted to common-commits@hadoop.apache.org by aa...@apache.org on 2015/03/02 06:11:14 UTC

hadoop git commit: HADOOP-11657. Align the output of `hadoop fs -du` to be more Unix-like. (aajisaka)

Repository: hadoop
Updated Branches:
  refs/heads/trunk e9ac88aac -> 30e73ebc7


HADOOP-11657. Align the output of `hadoop fs -du` to be more Unix-like. (aajisaka)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/30e73ebc
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/30e73ebc
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/30e73ebc

Branch: refs/heads/trunk
Commit: 30e73ebc77654ff941bcae5b6fb11d52c6d74d2e
Parents: e9ac88a
Author: Akira Ajisaka <aa...@apache.org>
Authored: Sun Mar 1 21:09:15 2015 -0800
Committer: Akira Ajisaka <aa...@apache.org>
Committed: Sun Mar 1 21:09:15 2015 -0800

----------------------------------------------------------------------
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 ++
 .../org/apache/hadoop/fs/shell/FsUsage.java     | 12 ++++++--
 .../org/apache/hadoop/hdfs/TestDFSShell.java    | 29 ++++++++++++++++++++
 3 files changed, 42 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/30e73ebc/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index f1d48bc..b1a7a7d 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -13,6 +13,9 @@ Trunk (Unreleased)
 
     HADOOP-10950. rework heap management vars (John Smith via aw)
 
+    HADOOP-11657. Align the output of `hadoop fs -du` to be more Unix-like.
+    (aajisaka)
+
   NEW FEATURES
 
     HADOOP-6590. Add a username check for hadoop sub-commands (John Smith via aw)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/30e73ebc/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsUsage.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsUsage.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsUsage.java
index 5c1dbf0..765b181 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsUsage.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsUsage.java
@@ -132,15 +132,23 @@ class FsUsage extends FsCommand {
     }
 
     @Override
-    protected void processPathArgument(PathData item) throws IOException {
+    protected void processArguments(LinkedList<PathData> args)
+        throws IOException {
       usagesTable = new TableBuilder(3);
+      super.processArguments(args);
+      if (!usagesTable.isEmpty()) {
+        usagesTable.printToStream(out);
+      }
+    }
+
+    @Override
+    protected void processPathArgument(PathData item) throws IOException {
       // go one level deep on dirs from cmdline unless in summary mode
       if (!summary && item.stat.isDirectory()) {
         recursePath(item);
       } else {
         super.processPathArgument(item);
       }
-      usagesTable.printToStream(out);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/30e73ebc/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
index ee04076..0a88208 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
@@ -95,6 +95,14 @@ public class TestDFSShell {
     return f;
   }
 
+  static Path writeByte(FileSystem fs, Path f) throws IOException {
+    DataOutputStream out = fs.create(f);
+    out.writeByte(1);
+    out.close();
+    assertTrue(fs.exists(f));
+    return f;
+  }
+
   static Path mkdir(FileSystem fs, Path p) throws IOException {
     assertTrue(fs.mkdirs(p));
     assertTrue(fs.exists(p));
@@ -272,6 +280,27 @@ public class TestDFSShell {
       Long combinedDiskUsed = myFileDiskUsed + myFile2DiskUsed;
       assertThat(returnString, containsString(combinedLength.toString()));
       assertThat(returnString, containsString(combinedDiskUsed.toString()));
+
+      // Check if output is rendered properly with multiple input paths
+      Path myFile3 = new Path("/test/dir/file3");
+      writeByte(fs, myFile3);
+      assertTrue(fs.exists(myFile3));
+      args = new String[3];
+      args[0] = "-du";
+      args[1] = "/test/dir/file3";
+      args[2] = "/test/dir/file2";
+      val = -1;
+      try {
+        val = shell.run(args);
+      } catch (Exception e) {
+        System.err.println("Exception raised from DFSShell.run " +
+            e.getLocalizedMessage());
+      }
+      assertEquals("Return code should be 0.", 0, val);
+      returnString = out.toString();
+      out.reset();
+      assertTrue(returnString.contains("1   2   /test/dir/file3"));
+      assertTrue(returnString.contains("23  46  /test/dir/file2"));
     } finally {
       System.setOut(psBackup);
       cluster.shutdown();