Posted to commits@zeppelin.apache.org by mo...@apache.org on 2017/07/13 05:20:29 UTC

zeppelin git commit: [ZEPPELIN-2730] compare with maxLength after getting all file status

Repository: zeppelin
Updated Branches:
  refs/heads/master 985b86e4d -> 278f7185e


[ZEPPELIN-2730] compare with maxLength after getting all file status

### What is this PR for?
When hdfs.maxLength is set to a low value such as 10, `ls` fails with "No such file or directory" instead of returning a listing.
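
For context: the old read loop appended a line only when it was shorter than maxLength, and WebHDFS typically returns the whole LISTSTATUS document as one long JSON line, so a small maxLength produced an empty response. A minimal sketch of the resulting symptom (assuming, as a plausible reading of the report, that the interpreter surfaces a null parse result as the "No such file or directory" message):

```java
import com.google.gson.Gson;

public class TruncationSketch {
  public static void main(String[] args) {
    // With the old loop and hdfs.maxLength = 10, the single long JSON line
    // was skipped entirely, so the buffered response stayed empty.
    String response = "";

    // Gson maps an empty document to null; a null result is what the
    // interpreter then reported as a missing file or directory.
    Object parsed = new Gson().fromJson(response, Object.class);
    System.out.println(parsed);  // prints "null"
  }
}
```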

### What type of PR is it?
[Bug Fix]

### Todos
* [ ] - Task

### What is the Jira issue?
* https://issues.apache.org/jira/browse/ZEPPELIN-2730

### How should this be tested?
Set hdfs.maxLength to 10 and verify that `ls -l /` returns a correct listing, truncated to at most 10 entries.
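
For a quick manual check, something along these lines (mirroring the new unit test in this commit; MockHDFSFileInterpreter exists only inside HDFSFileInterpreterTest, and note the property key is lower-case in code):

```java
Properties props = new Properties();
props.setProperty("hdfs.maxlength", "10");  // cap the listing at 10 entries

HDFSFileInterpreter t = new MockHDFSFileInterpreter(props);
t.open();
// Expect a successful listing of at most 10 entries rather than
// "No such file or directory".
InterpreterResult result = t.interpret("ls -l /", null);
t.close();
```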

### Screenshots (if appropriate)

### Questions:
* Do the license files need to be updated? No
* Are there breaking changes for older versions? No
* Does this need documentation? No

Author: gaoyang <ga...@cmss.chinamobile.com>

Closes #2467 from gaoyang143/zeppelin-2730 and squashes the following commits:

092b314 [gaoyang] add a line
c43e90b [gaoyang] add maxLength test in HDFSFileInterpreterTest
e8ef06c [gaoyang] change max length of file status
dd266de [gaoyang] delete useless code
dcb8648 [gaoyang] compare with maxLength after getting all file status


Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/278f7185
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/278f7185
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/278f7185

Branch: refs/heads/master
Commit: 278f7185e184ed3721cc1a807c9dd0bd0971ef81
Parents: 985b86e
Author: gaoyang <ga...@cmss.chinamobile.com>
Authored: Tue Jul 11 09:35:27 2017 +0800
Committer: Lee moon soo <mo...@apache.org>
Committed: Thu Jul 13 14:20:24 2017 +0900

----------------------------------------------------------------------
 .../org/apache/zeppelin/file/HDFSCommand.java   | 17 ++-----
 .../zeppelin/file/HDFSFileInterpreter.java      |  6 ++-
 .../zeppelin/file/HDFSFileInterpreterTest.java  | 50 +++++++++++++++-----
 3 files changed, 49 insertions(+), 24 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/278f7185/file/src/main/java/org/apache/zeppelin/file/HDFSCommand.java
----------------------------------------------------------------------
diff --git a/file/src/main/java/org/apache/zeppelin/file/HDFSCommand.java b/file/src/main/java/org/apache/zeppelin/file/HDFSCommand.java
index 94508cd..a097b88 100644
--- a/file/src/main/java/org/apache/zeppelin/file/HDFSCommand.java
+++ b/file/src/main/java/org/apache/zeppelin/file/HDFSCommand.java
@@ -134,21 +134,14 @@ public class HDFSCommand {
       int responseCode = con.getResponseCode();
       logger.info("Sending 'GET' request to URL : " + hdfsUrl);
       logger.info("Response Code : " + responseCode);
-
-      BufferedReader in = new BufferedReader(
-          new InputStreamReader(con.getInputStream()));
-      String inputLine;
       StringBuffer response = new StringBuffer();
-
-      int i = 0;
-      while ((inputLine = in.readLine()) != null) {
-        if (inputLine.length() < maxLength)
+      try (BufferedReader in = new BufferedReader(
+              new InputStreamReader(con.getInputStream()));) {
+        String inputLine;
+        while ((inputLine = in.readLine()) != null) {
           response.append(inputLine);
-        i++;
-        if (i >= maxLength)
-          break;
+        }
       }
-      in.close();
       return response.toString();
     }
     return null;
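
For reference, the new read loop is equivalent to the sketch below, which also drops the stray semicolon inside the try-with-resources header and swaps StringBuffer for StringBuilder, since no synchronization is needed here:

```java
StringBuilder response = new StringBuilder();
try (BufferedReader in = new BufferedReader(
    new InputStreamReader(con.getInputStream()))) {
  String inputLine;
  while ((inputLine = in.readLine()) != null) {
    response.append(inputLine);  // keep the full payload; the cap is applied downstream
  }
}
return response.toString();
```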

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/278f7185/file/src/main/java/org/apache/zeppelin/file/HDFSFileInterpreter.java
----------------------------------------------------------------------
diff --git a/file/src/main/java/org/apache/zeppelin/file/HDFSFileInterpreter.java b/file/src/main/java/org/apache/zeppelin/file/HDFSFileInterpreter.java
index c4a1730..244101c 100644
--- a/file/src/main/java/org/apache/zeppelin/file/HDFSFileInterpreter.java
+++ b/file/src/main/java/org/apache/zeppelin/file/HDFSFileInterpreter.java
@@ -217,8 +217,12 @@ public class HDFSFileInterpreter extends FileInterpreter {
                   allFiles.FileStatuses != null &&
                   allFiles.FileStatuses.FileStatus != null)
           {
-            for (OneFileStatus fs : allFiles.FileStatuses.FileStatus)
+            int length = cmd.maxLength < allFiles.FileStatuses.FileStatus.length ? cmd.maxLength :
+                    allFiles.FileStatuses.FileStatus.length;
+            for (int index = 0; index < length; index++) {
+              OneFileStatus fs = allFiles.FileStatuses.FileStatus[index];
               all = all + listOne(path, fs) + '\n';
+            }
           }
         }
         return all;
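
This hunk is the heart of the fix: every FileStatus is fetched first, and only the printed listing is capped. The same loop could be stated more compactly with Math.min:

```java
// Print at most cmd.maxLength of the statuses returned by WebHDFS.
int length = Math.min(cmd.maxLength, allFiles.FileStatuses.FileStatus.length);
for (int index = 0; index < length; index++) {
  all = all + listOne(path, allFiles.FileStatuses.FileStatus[index]) + '\n';
}
```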

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/278f7185/file/src/test/java/org/apache/zeppelin/file/HDFSFileInterpreterTest.java
----------------------------------------------------------------------
diff --git a/file/src/test/java/org/apache/zeppelin/file/HDFSFileInterpreterTest.java b/file/src/test/java/org/apache/zeppelin/file/HDFSFileInterpreterTest.java
index 335693f..adc9bd6 100644
--- a/file/src/test/java/org/apache/zeppelin/file/HDFSFileInterpreterTest.java
+++ b/file/src/test/java/org/apache/zeppelin/file/HDFSFileInterpreterTest.java
@@ -43,6 +43,27 @@ import java.lang.String;
 public class HDFSFileInterpreterTest extends TestCase {
 
     @Test
+    public void testMaxLength() {
+
+      HDFSFileInterpreter t = new MockHDFSFileInterpreter(new Properties());
+      t.open();
+      InterpreterResult result = t.interpret("ls -l /", null);
+      String lineSeparator = "\n";
+      int fileStatusLength = MockFileSystem.fileStatuses.split(lineSeparator).length;
+      assertEquals(result.message().get(0).getData().split(lineSeparator).length, fileStatusLength);
+      t.close();
+
+      Properties properties = new Properties();
+      final int maxLength = fileStatusLength - 2;
+      properties.setProperty("hdfs.maxlength", String.valueOf(maxLength));
+      HDFSFileInterpreter t1 = new MockHDFSFileInterpreter(properties);
+      t1.open();
+      InterpreterResult result1 = t1.interpret("ls -l /", null);
+      assertEquals(result1.message().get(0).getData().split(lineSeparator).length, maxLength);
+      t1.close();
+    }
+
+    @Test
     public void test() {
       HDFSFileInterpreter t = new MockHDFSFileInterpreter(new Properties());
       t.open();
@@ -126,16 +147,17 @@ public class HDFSFileInterpreterTest extends TestCase {
    */
   class MockFileSystem {
     HashMap<String, String> mfs = new HashMap<>();
+    static final String fileStatuses =
+            "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16389,\"group\":\"hadoop\",\"length\":0,\"modificationTime\":1438548219672,\"owner\":\"yarn\",\"pathSuffix\":\"app-logs\",\"permission\":\"777\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
+                    "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16395,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438548030045,\"owner\":\"hdfs\",\"pathSuffix\":\"hdp\",\"permission\":\"755\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
+                    "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16390,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438547985336,\"owner\":\"mapred\",\"pathSuffix\":\"mapred\",\"permission\":\"755\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
+                    "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":2,\"fileId\":16392,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438547985346,\"owner\":\"hdfs\",\"pathSuffix\":\"mr-history\",\"permission\":\"755\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
+                    "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16400,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438548089725,\"owner\":\"hdfs\",\"pathSuffix\":\"system\",\"permission\":\"755\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
+                    "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16386,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438548150089,\"owner\":\"hdfs\",\"pathSuffix\":\"tmp\",\"permission\":\"777\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
+                    "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16387,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438547921792,\"owner\":\"hdfs\",\"pathSuffix\":\"user\",\"permission\":\"755\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"}\n";
     void addListStatusData() {
       mfs.put("/?op=LISTSTATUS",
-          "{\"FileStatuses\":{\"FileStatus\":[\n" +
-              "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16389,\"group\":\"hadoop\",\"length\":0,\"modificationTime\":1438548219672,\"owner\":\"yarn\",\"pathSuffix\":\"app-logs\",\"permission\":\"777\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
-              "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16395,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438548030045,\"owner\":\"hdfs\",\"pathSuffix\":\"hdp\",\"permission\":\"755\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
-              "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16390,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438547985336,\"owner\":\"mapred\",\"pathSuffix\":\"mapred\",\"permission\":\"755\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
-              "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":2,\"fileId\":16392,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438547985346,\"owner\":\"hdfs\",\"pathSuffix\":\"mr-history\",\"permission\":\"755\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
-              "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16400,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438548089725,\"owner\":\"hdfs\",\"pathSuffix\":\"system\",\"permission\":\"755\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
-              "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16386,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438548150089,\"owner\":\"hdfs\",\"pathSuffix\":\"tmp\",\"permission\":\"777\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
-              "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16387,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438547921792,\"owner\":\"hdfs\",\"pathSuffix\":\"user\",\"permission\":\"755\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"}\n" +
+          "{\"FileStatuses\":{\"FileStatus\":[\n" + fileStatuses +
               "]}}"
       );
       mfs.put("/user?op=LISTSTATUS",
@@ -183,13 +205,17 @@ public class HDFSFileInterpreterTest extends TestCase {
   class MockHDFSCommand extends HDFSCommand {
     MockFileSystem fs = null;
 
-    public MockHDFSCommand(String url, String user, Logger logger) {
-      super(url, user, logger, 1000);
+    public MockHDFSCommand(String url, String user, Logger logger, int maxLength) {
+      super(url, user, logger, maxLength);
       fs = new MockFileSystem();
       fs.addMockData(getFileStatus);
       fs.addMockData(listStatus);
     }
 
+    public MockHDFSCommand(String url, String user, Logger logger) {
+      this(url, user, logger, 1000);
+    }
+
     @Override
     public String runCommand(Op op, String path, Arg[] args) throws Exception {
 
@@ -215,7 +241,9 @@ public class HDFSFileInterpreterTest extends TestCase {
     @Override
     public void prepare() {
       // Run commands against mock File System instead of WebHDFS
-      cmd = new MockHDFSCommand("", "", logger);
+      int i = Integer.parseInt(getProperty(HDFS_MAXLENGTH) == null ? "1000"
+              : getProperty(HDFS_MAXLENGTH));
+      cmd = new MockHDFSCommand("", "", logger, i);
       gson = new Gson();
     }