You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@zeppelin.apache.org by jo...@apache.org on 2017/01/14 16:38:29 UTC
zeppelin git commit: [MINOR] Remove unused import. Naming convention.
Repository: zeppelin
Updated Branches:
refs/heads/master cb8e41870 -> 69bc353d3
[MINOR] Remove unused import. Naming convention.
### What is this PR for?
Change method names (method names should comply with the naming convention). Remove unused imports. Using append makes the code simpler to read: `sb.append("\nAccessTime = ").append(accessTime);` instead of `sb.append("\nAccessTime = " + accessTime);`
### What type of PR is it?
[Refactoring]
### Todos
* [ ] - Task
### What is the Jira issue?
* https://issues.apache.org/jira/browse/ZEPPELIN-1839
### How should this be tested?
Run HDFSFileInterpreterTest.java (tests still pass)
### Questions:
* Do the license files need updating? (no)
* Are there breaking changes for older versions? (no)
* Does this need documentation? (no)
Remove this unused import, naming convention
Author: Unknown <So...@gmail.com>
Closes #1747 from bitchelov/hdfsFileInterpreterSmallChanges and squashes the following commits:
6d27bb8 [Unknown] Minor changes
Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/69bc353d
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/69bc353d
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/69bc353d
Branch: refs/heads/master
Commit: 69bc353d3c047ad8ad29584ce9e3b6c84d7ace7e
Parents: cb8e418
Author: Unknown <So...@gmail.com>
Authored: Mon Dec 12 17:14:12 2016 +0300
Committer: Jongyoul Lee <jo...@apache.org>
Committed: Sun Jan 15 01:38:22 2017 +0900
----------------------------------------------------------------------
.../zeppelin/file/HDFSFileInterpreter.java | 34 +++++++++-----------
1 file changed, 16 insertions(+), 18 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/69bc353d/file/src/main/java/org/apache/zeppelin/file/HDFSFileInterpreter.java
----------------------------------------------------------------------
diff --git a/file/src/main/java/org/apache/zeppelin/file/HDFSFileInterpreter.java b/file/src/main/java/org/apache/zeppelin/file/HDFSFileInterpreter.java
index c2caa11..1b2b01c 100644
--- a/file/src/main/java/org/apache/zeppelin/file/HDFSFileInterpreter.java
+++ b/file/src/main/java/org/apache/zeppelin/file/HDFSFileInterpreter.java
@@ -23,9 +23,7 @@ import java.util.*;
import com.google.gson.Gson;
import org.apache.commons.lang.StringUtils;
-import org.apache.zeppelin.interpreter.Interpreter;
import org.apache.zeppelin.interpreter.InterpreterException;
-import org.apache.zeppelin.interpreter.InterpreterPropertyBuilder;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
/**
@@ -75,19 +73,19 @@ public class HDFSFileInterpreter extends FileInterpreter {
public String type;
public String toString() {
StringBuilder sb = new StringBuilder();
- sb.append("\nAccessTime = " + accessTime);
- sb.append("\nBlockSize = " + blockSize);
- sb.append("\nChildrenNum = " + childrenNum);
- sb.append("\nFileId = " + fileId);
- sb.append("\nGroup = " + group);
- sb.append("\nLength = " + length);
- sb.append("\nModificationTime = " + modificationTime);
- sb.append("\nOwner = " + owner);
- sb.append("\nPathSuffix = " + pathSuffix);
- sb.append("\nPermission = " + permission);
- sb.append("\nReplication = " + replication);
- sb.append("\nStoragePolicy = " + storagePolicy);
- sb.append("\nType = " + type);
+ sb.append("\nAccessTime = ").append(accessTime);
+ sb.append("\nBlockSize = ").append(blockSize);
+ sb.append("\nChildrenNum = ").append(childrenNum);
+ sb.append("\nFileId = ").append(fileId);
+ sb.append("\nGroup = ").append(group);
+ sb.append("\nLength = ").append(length);
+ sb.append("\nModificationTime = ").append(modificationTime);
+ sb.append("\nOwner = ").append(owner);
+ sb.append("\nPathSuffix = ").append(pathSuffix);
+ sb.append("\nPermission = ").append(permission);
+ sb.append("\nReplication = ").append(replication);
+ sb.append("\nStoragePolicy = ").append(storagePolicy);
+ sb.append("\nType = ").append(type);
return sb.toString();
}
}
@@ -162,7 +160,7 @@ public class HDFSFileInterpreter extends FileInterpreter {
private String listDate(OneFileStatus fs) {
return new SimpleDateFormat("yyyy-MM-dd HH:mm").format(new Date(fs.modificationTime));
}
- private String ListOne(String path, OneFileStatus fs) {
+ private String listOne(String path, OneFileStatus fs) {
if (args.flags.contains(new Character('l'))) {
StringBuilder sb = new StringBuilder();
sb.append(listPermission(fs) + "\t");
@@ -194,7 +192,7 @@ public class HDFSFileInterpreter extends FileInterpreter {
String str = cmd.runCommand(cmd.getFileStatus, filePath, null);
SingleFileStatus sfs = gson.fromJson(str, SingleFileStatus.class);
if (sfs != null) {
- return ListOne(filePath, sfs.FileStatus);
+ return listOne(filePath, sfs.FileStatus);
}
} catch (Exception e) {
logger.error("listFile: " + filePath, e);
@@ -218,7 +216,7 @@ public class HDFSFileInterpreter extends FileInterpreter {
allFiles.FileStatuses.FileStatus != null)
{
for (OneFileStatus fs : allFiles.FileStatuses.FileStatus)
- all = all + ListOne(path, fs) + '\n';
+ all = all + listOne(path, fs) + '\n';
}
}
return all;