You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by we...@apache.org on 2019/04/02 19:25:10 UTC

[hadoop] branch trunk updated: HDFS-13960. hdfs dfs -checksum command should optionally show block size in output. Contributed by Lokesh Jain.

This is an automated email from the ASF dual-hosted git repository.

weichiu pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new cf26811  HDFS-13960. hdfs dfs -checksum command should optionally show block size in output. Contributed by Lokesh Jain.
cf26811 is described below

commit cf268114c9af2e33f35d0c24b57e31ef4d5e8353
Author: Lokesh Jain <lj...@apache.org>
AuthorDate: Tue Apr 2 12:23:18 2019 -0700

    HDFS-13960. hdfs dfs -checksum command should optionally show block size in output. Contributed by Lokesh Jain.
    
    Signed-off-by: Wei-Chiu Chuang <we...@apache.org>
---
 .../java/org/apache/hadoop/fs/shell/Display.java   | 27 +++++++++++++++-------
 .../src/site/markdown/FileSystemShell.md           |  6 ++++-
 .../hadoop-common/src/test/resources/testConf.xml  |  2 +-
 .../java/org/apache/hadoop/hdfs/TestDFSShell.java  | 26 +++++++++++++++++++++
 4 files changed, 51 insertions(+), 10 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
index d3b9a21..670fa15 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
@@ -175,7 +175,7 @@ class Display extends FsCommand {
   
   public static class Checksum extends Display {
     public static final String NAME = "checksum";
-    public static final String USAGE = "<src> ...";
+    public static final String USAGE = "[-v] <src> ...";
     public static final String DESCRIPTION =
       "Dump checksum information for files that match the file " +
       "pattern <src> to stdout. Note that this requires a round-trip " +
@@ -184,6 +184,16 @@ class Display extends FsCommand {
       "file depends on its content, block size and the checksum " +
       "algorithm and parameters used for creating the file.";
 
+    private boolean displayBlockSize;
+
+    @Override
+    protected void processOptions(LinkedList<String> args)
+        throws IOException {
+      CommandFormat cf = new CommandFormat(1, Integer.MAX_VALUE, "v");
+      cf.parse(args);
+      displayBlockSize = cf.getOpt("v");
+    }
+
     @Override
     protected void processPath(PathData item) throws IOException {
       if (item.stat.isDirectory()) {
@@ -191,14 +201,15 @@ class Display extends FsCommand {
       }
 
       FileChecksum checksum = item.fs.getFileChecksum(item.path);
-      if (checksum == null) {
-        out.printf("%s\tNONE\t%n", item.toString());
+      String outputChecksum = checksum == null ? "NONE" :
+          String.format("%s\t%s", checksum.getAlgorithmName(), StringUtils
+              .byteToHexString(checksum.getBytes(), 0, checksum.getLength()));
+      if (displayBlockSize) {
+        FileStatus fileStatus = item.fs.getFileStatus(item.path);
+        out.printf("%s\t%s\tBlockSize=%s%n", item.toString(), outputChecksum,
+            fileStatus != null ? fileStatus.getBlockSize() : "NONE");
       } else {
-        String checksumString = StringUtils.byteToHexString(
-            checksum.getBytes(), 0, checksum.getLength());
-        out.printf("%s\t%s\t%s%n",
-            item.toString(), checksum.getAlgorithmName(),
-            checksumString);
+        out.printf("%s\t%s%n", item.toString(), outputChecksum);
       }
     }
   }
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/FileSystemShell.md b/hadoop-common-project/hadoop-common/src/site/markdown/FileSystemShell.md
index f4a37ea..f050e30 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/FileSystemShell.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/FileSystemShell.md
@@ -73,10 +73,14 @@ Returns 0 on success and -1 on error.
 checksum
 --------
 
-Usage: `hadoop fs -checksum URI`
+Usage: `hadoop fs -checksum [-v] URI`
 
 Returns the checksum information of a file.
 
+Options
+
+* The `-v` option displays the block size for the file.
+
 Example:
 
 * `hadoop fs -checksum hdfs://nn1.example.com/file1`
diff --git a/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml b/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
index 29a88fc..e38c259 100644
--- a/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
+++ b/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
@@ -714,7 +714,7 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^-checksum &lt;src&gt; \.\.\. :\s*</expected-output>
+          <expected-output>^-checksum \[-v\] &lt;src&gt; \.\.\. :\s*</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
index 5266fe4..76179bc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
@@ -71,6 +71,7 @@ import org.junit.rules.Timeout;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Rule;
+import org.junit.Assert;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY;
 import static org.apache.hadoop.fs.permission.AclEntryScope.ACCESS;
@@ -1122,6 +1123,31 @@ public class TestDFSShell {
   }
 
   @Test (timeout = 30000)
+  public void testChecksum() throws Exception {
+    PrintStream printStream = System.out;
+    try {
+      ByteArrayOutputStream out = new ByteArrayOutputStream();
+      System.setOut(new PrintStream(out));
+      FsShell shell = new FsShell(dfs.getConf());
+      final Path filePath = new Path("/testChecksum/file1");
+      writeFile(dfs, filePath);
+      FileStatus fileStatus = dfs.getFileStatus(filePath);
+      FileChecksum checksum = dfs.getFileChecksum(filePath);
+      String[] args = {"-checksum", "-v", filePath.toString()};
+      assertEquals(0, shell.run(args));
+      // verify block size is printed in the output
+      assertTrue(out.toString()
+          .contains(String.format("BlockSize=%s", fileStatus.getBlockSize())));
+      // verify checksum is printed in the output
+      assertTrue(out.toString().contains(StringUtils
+          .byteToHexString(checksum.getBytes(), 0, checksum.getLength())));
+    } finally {
+      Assert.assertNotNull(printStream);
+      System.setOut(printStream);
+    }
+  }
+
+  @Test (timeout = 30000)
   public void testCopyToLocal() throws IOException {
     FsShell shell = new FsShell(dfs.getConf());
 


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org