Posted to commits@zeppelin.apache.org by zj...@apache.org on 2018/02/14 04:35:12 UTC
zeppelin git commit: ZEPPELIN-3153. Fixed Checkstyle errors and warnings in the file module
Repository: zeppelin
Updated Branches:
refs/heads/master 3418055cc -> e89f10278
ZEPPELIN-3153. Fixed Checkstyle errors and warnings in the file module
### What is this PR for?
Fixed the Checkstyle errors and warnings in the file module.
### What type of PR is it?
Improvement
### Todos
* [ ] - Task
### What is the Jira issue?
* https://issues.apache.org/jira/browse/ZEPPELIN-3153
### How should this be tested?
* CI pass
### Screenshots (if appropriate)
### Questions:
* Do the license files need to be updated? No
* Are there breaking changes for older versions? No
* Does this need documentation? No
Author: Jan Hentschel <ja...@ultratendency.com>
Closes #2787 from HorizonNet/ZEPPELIN-3153 and squashes the following commits:
9b2c6fb [Jan Hentschel] ZEPPELIN-3153. Fixed Checkstyle errors and warnings in the file module
Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/e89f1027
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/e89f1027
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/e89f1027
Branch: refs/heads/master
Commit: e89f1027875fbcec6482f24c3e316025eb16ee8e
Parents: 3418055
Author: Jan Hentschel <ja...@ultratendency.com>
Authored: Sat Feb 10 14:46:03 2018 +0100
Committer: Jeff Zhang <zj...@apache.org>
Committed: Wed Feb 14 12:35:05 2018 +0800
----------------------------------------------------------------------
file/pom.xml | 7 +
.../apache/zeppelin/file/FileInterpreter.java | 38 +-
.../org/apache/zeppelin/file/HDFSCommand.java | 23 +-
.../zeppelin/file/HDFSFileInterpreter.java | 97 +++--
.../zeppelin/file/HDFSFileInterpreterTest.java | 400 ++++++++++---------
5 files changed, 312 insertions(+), 253 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/e89f1027/file/pom.xml
----------------------------------------------------------------------
diff --git a/file/pom.xml b/file/pom.xml
index e649991..ed0ef3f 100644
--- a/file/pom.xml
+++ b/file/pom.xml
@@ -91,6 +91,13 @@
<plugin>
<artifactId>maven-resources-plugin</artifactId>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-checkstyle-plugin</artifactId>
+ <configuration>
+ <skip>false</skip>
+ </configuration>
+ </plugin>
</plugins>
</build>
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/e89f1027/file/src/main/java/org/apache/zeppelin/file/FileInterpreter.java
----------------------------------------------------------------------
diff --git a/file/src/main/java/org/apache/zeppelin/file/FileInterpreter.java b/file/src/main/java/org/apache/zeppelin/file/FileInterpreter.java
index cf83672..eea5650 100644
--- a/file/src/main/java/org/apache/zeppelin/file/FileInterpreter.java
+++ b/file/src/main/java/org/apache/zeppelin/file/FileInterpreter.java
@@ -18,6 +18,17 @@
package org.apache.zeppelin.file;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Properties;
+import java.util.StringTokenizer;
+
import org.apache.zeppelin.interpreter.Interpreter;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterException;
@@ -27,11 +38,6 @@ import org.apache.zeppelin.interpreter.InterpreterResult.Type;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
import org.apache.zeppelin.scheduler.Scheduler;
import org.apache.zeppelin.scheduler.SchedulerFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.*;
/**
* File interpreter for Zeppelin.
@@ -48,7 +54,7 @@ public abstract class FileInterpreter extends Interpreter {
}
/**
- * Handling the arguments of the command
+ * Handling the arguments of the command.
*/
public class CommandArgs {
public String input = null;
@@ -74,25 +80,25 @@ public abstract class FileInterpreter extends Interpreter {
}
public void parseArgs() {
- if (input == null)
+ if (input == null) {
return;
+ }
StringTokenizer st = new StringTokenizer(input);
if (st.hasMoreTokens()) {
command = st.nextToken();
- while (st.hasMoreTokens())
+ while (st.hasMoreTokens()) {
parseArg(st.nextToken());
+ }
}
}
}
// Functions that each file system implementation must override
-
public abstract String listAll(String path) throws InterpreterException;
public abstract boolean isDirectory(String path);
// Combine paths, takes care of arguments such as ..
-
protected String getNewPath(String argument){
Path arg = Paths.get(argument);
Path ret = arg.isAbsolute() ? arg : Paths.get(currentDir, argument);
@@ -100,7 +106,6 @@ public abstract class FileInterpreter extends Interpreter {
}
// Handle the command handling uniformly across all file systems
-
@Override
public InterpreterResult interpret(String cmd, InterpreterContext contextInterpreter) {
logger.info("Run File command '" + cmd + "'");
@@ -114,18 +119,15 @@ public abstract class FileInterpreter extends Interpreter {
}
// Simple parsing of the command
-
if (args.command.equals("cd")) {
-
String newPath = !args.args.isEmpty() ? getNewPath(args.args.get(0)) : currentDir;
- if (!isDirectory(newPath))
+ if (!isDirectory(newPath)) {
return new InterpreterResult(Code.ERROR, Type.TEXT, newPath + ": No such directory");
+ }
currentDir = newPath;
return new InterpreterResult(Code.SUCCESS, Type.TEXT, "OK");
-
} else if (args.command.equals("ls")) {
-
String newPath = !args.args.isEmpty() ? getNewPath(args.args.get(0)) : currentDir;
try {
String results = listAll(newPath);
@@ -136,13 +138,9 @@ public abstract class FileInterpreter extends Interpreter {
}
} else if (args.command.equals("pwd")) {
-
return new InterpreterResult(Code.SUCCESS, Type.TEXT, currentDir);
-
} else {
-
return new InterpreterResult(Code.ERROR, Type.TEXT, "Unknown command");
-
}
}
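Most of the FileInterpreter.java changes above apply Checkstyle's NeedBraces rule, which requires braces around every control-flow body, even a single statement. A minimal self-contained sketch of the enforced style (hypothetical class and method names, not code from this commit):

import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;

// Hypothetical example mirroring the tokenizing loop in CommandArgs.parseArgs.
public class NeedBracesExample {
  public static List<String> tokens(String input) {
    List<String> out = new ArrayList<>();
    // Single-statement bodies get braces, even when they fit on one line.
    if (input == null) {
      return out;
    }
    StringTokenizer st = new StringTokenizer(input);
    while (st.hasMoreTokens()) {
      out.add(st.nextToken());
    }
    return out;
  }

  public static void main(String[] args) {
    System.out.println(tokens("ls -l /user"));  // prints [ls, -l, /user]
  }
}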
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/e89f1027/file/src/main/java/org/apache/zeppelin/file/HDFSCommand.java
----------------------------------------------------------------------
diff --git a/file/src/main/java/org/apache/zeppelin/file/HDFSCommand.java b/file/src/main/java/org/apache/zeppelin/file/HDFSCommand.java
index a097b88..6b3dc4b 100644
--- a/file/src/main/java/org/apache/zeppelin/file/HDFSCommand.java
+++ b/file/src/main/java/org/apache/zeppelin/file/HDFSCommand.java
@@ -18,21 +18,21 @@
package org.apache.zeppelin.file;
-import java.net.URL;
-import java.net.HttpURLConnection;
+import org.slf4j.Logger;
+
import java.io.BufferedReader;
import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.URL;
+
import javax.ws.rs.core.UriBuilder;
-import org.slf4j.Logger;
/**
- * Definition and HTTP invocation methods for all WebHDFS commands
- *
+ * Definition and HTTP invocation methods for all WebHDFS commands.
*/
public class HDFSCommand {
-
/**
- * Type of HTTP request
+ * Type of HTTP request.
*/
public enum HttpType {
GET,
@@ -40,7 +40,7 @@ public class HDFSCommand {
}
/**
- * Definition of WebHDFS operator
+ * Definition of WebHDFS operator.
*/
public class Op {
public String op;
@@ -55,7 +55,7 @@ public class HDFSCommand {
}
/**
- * Definition of argument to an operator
+ * Definition of argument to an operator.
*/
public class Arg {
public String key;
@@ -90,8 +90,7 @@ public class HDFSCommand {
path == null ||
(op.minArgs > 0 &&
(args == null ||
- args.length != op.minArgs)))
- {
+ args.length != op.minArgs))) {
String a = "";
a = (op != null) ? a + op.op + "\n" : a;
a = (path != null) ? a + path + "\n" : a;
@@ -101,10 +100,8 @@ public class HDFSCommand {
return null;
}
-
// The operator that runs all commands
public String runCommand(Op op, String path, Arg[] args) throws Exception {
-
// Check arguments
String error = checkArgs(op, path, args);
if (error != null) {
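HDFSCommand wraps the WebHDFS REST API: checkArgs validates an operator and its arguments before runCommand issues the HTTP call. As a rough sketch of the URL shape such a call targets (host, port, and user name below are placeholders, and the class itself composes its URI with javax.ws.rs.core.UriBuilder rather than string concatenation):

// Hypothetical sketch of the WebHDFS URL convention used by runCommand.
public class WebHdfsUrlSketch {
  static String url(String base, String path, String op, String user) {
    // WebHDFS REST convention: http://<host>:<port>/webhdfs/v1/<path>?op=<OP>&user.name=<user>
    return base + "/webhdfs/v1" + path + "?op=" + op + "&user.name=" + user;
  }

  public static void main(String[] args) {
    // Prints http://namenode:50070/webhdfs/v1/user?op=LISTSTATUS&user.name=zeppelin
    System.out.println(url("http://namenode:50070", "/user", "LISTSTATUS", "zeppelin"));
  }
}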
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/e89f1027/file/src/main/java/org/apache/zeppelin/file/HDFSFileInterpreter.java
----------------------------------------------------------------------
diff --git a/file/src/main/java/org/apache/zeppelin/file/HDFSFileInterpreter.java b/file/src/main/java/org/apache/zeppelin/file/HDFSFileInterpreter.java
index d715ed9..b27dcb6 100644
--- a/file/src/main/java/org/apache/zeppelin/file/HDFSFileInterpreter.java
+++ b/file/src/main/java/org/apache/zeppelin/file/HDFSFileInterpreter.java
@@ -18,11 +18,17 @@
package org.apache.zeppelin.file;
-import java.text.SimpleDateFormat;
-import java.util.*;
-
import com.google.gson.Gson;
+
+import com.google.gson.annotations.SerializedName;
import org.apache.commons.lang.StringUtils;
+
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.Properties;
+
import org.apache.zeppelin.completer.CompletionType;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterException;
@@ -30,7 +36,6 @@ import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
/**
* HDFS implementation of File interpreter for Zeppelin.
- *
*/
public class HDFSFileInterpreter extends FileInterpreter {
static final String HDFS_URL = "hdfs.url";
@@ -55,7 +60,7 @@ public class HDFSFileInterpreter extends FileInterpreter {
}
/**
- * Status of one file
+ * Status of one file.
*
* matches returned JSON
*/
@@ -73,6 +78,7 @@ public class HDFSFileInterpreter extends FileInterpreter {
public int replication;
public int storagePolicy;
public String type;
+
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("\nAccessTime = ").append(accessTime);
@@ -93,38 +99,41 @@ public class HDFSFileInterpreter extends FileInterpreter {
}
/**
- * Status of one file
+ * Status of one file.
*
* matches returned JSON
*/
public class SingleFileStatus {
- public OneFileStatus FileStatus;
+ @SerializedName("FileStatus")
+ public OneFileStatus fileStatus;
}
/**
- * Status of all files in a directory
+ * Status of all files in a directory.
*
* matches returned JSON
*/
public class MultiFileStatus {
- public OneFileStatus[] FileStatus;
+ @SerializedName("FileStatus")
+ public OneFileStatus[] fileStatus;
}
/**
- * Status of all files in a directory
+ * Status of all files in a directory.
*
* matches returned JSON
*/
public class AllFileStatus {
- public MultiFileStatus FileStatuses;
+ @SerializedName("FileStatuses")
+ public MultiFileStatus fileStatuses;
}
// tests whether we're able to connect to HDFS
-
private void testConnection() {
try {
- if (isDirectory("/"))
+ if (isDirectory("/")) {
logger.info("Successfully created WebHDFS connection");
+ }
} catch (Exception e) {
logger.error("testConnection: Cannot open WebHDFS connection. Bad URL: " + "/", e);
exceptionOnConnect = e;
@@ -159,9 +168,11 @@ public class HDFSFileInterpreter extends FileInterpreter {
sb.append(((p & 0x1) == 0) ? '-' : 'x');
return sb.toString();
}
+
private String listDate(OneFileStatus fs) {
return new SimpleDateFormat("yyyy-MM-dd HH:mm").format(new Date(fs.modificationTime));
}
+
private String listOne(String path, OneFileStatus fs) {
if (args.flags.contains(new Character('l'))) {
StringBuilder sb = new StringBuilder();
@@ -183,7 +194,11 @@ public class HDFSFileInterpreter extends FileInterpreter {
private String humanReadableByteCount(long bytes) {
int unit = 1024;
- if (bytes < unit) return bytes + " B";
+
+ if (bytes < unit) {
+ return bytes + " B";
+ }
+
int exp = (int) (Math.log(bytes) / Math.log(unit));
String pre = "KMGTPE".charAt(exp - 1) + "";
return String.format("%.1f %sB", bytes / Math.pow(unit, exp), pre);
@@ -194,7 +209,7 @@ public class HDFSFileInterpreter extends FileInterpreter {
String str = cmd.runCommand(cmd.getFileStatus, filePath, null);
SingleFileStatus sfs = gson.fromJson(str, SingleFileStatus.class);
if (sfs != null) {
- return listOne(filePath, sfs.FileStatus);
+ return listOne(filePath, sfs.fileStatus);
}
} catch (Exception e) {
logger.error("listFile: " + filePath, e);
@@ -204,8 +219,10 @@ public class HDFSFileInterpreter extends FileInterpreter {
public String listAll(String path) throws InterpreterException {
String all = "";
- if (exceptionOnConnect != null)
+ if (exceptionOnConnect != null) {
return "Error connecting to provided endpoint.";
+ }
+
try {
//see if directory.
if (isDirectory(path)) {
@@ -214,13 +231,12 @@ public class HDFSFileInterpreter extends FileInterpreter {
AllFileStatus allFiles = gson.fromJson(sfs, AllFileStatus.class);
if (allFiles != null &&
- allFiles.FileStatuses != null &&
- allFiles.FileStatuses.FileStatus != null)
- {
- int length = cmd.maxLength < allFiles.FileStatuses.FileStatus.length ? cmd.maxLength :
- allFiles.FileStatuses.FileStatus.length;
+ allFiles.fileStatuses != null &&
+ allFiles.fileStatuses.fileStatus != null) {
+ int length = cmd.maxLength < allFiles.fileStatuses.fileStatus.length ? cmd.maxLength :
+ allFiles.fileStatuses.fileStatus.length;
for (int index = 0; index < length; index++) {
- OneFileStatus fs = allFiles.FileStatuses.FileStatus[index];
+ OneFileStatus fs = allFiles.fileStatuses.fileStatus[index];
all = all + listOne(path, fs) + '\n';
}
}
@@ -237,13 +253,16 @@ public class HDFSFileInterpreter extends FileInterpreter {
public boolean isDirectory(String path) {
boolean ret = false;
- if (exceptionOnConnect != null)
+ if (exceptionOnConnect != null) {
return ret;
+ }
+
try {
String str = cmd.runCommand(cmd.getFileStatus, path, null);
SingleFileStatus sfs = gson.fromJson(str, SingleFileStatus.class);
- if (sfs != null)
- return sfs.FileStatus.type.equals("DIRECTORY");
+ if (sfs != null) {
+ return sfs.fileStatus.type.equals("DIRECTORY");
+ }
} catch (Exception e) {
logger.error("IsDirectory: " + path, e);
return false;
@@ -251,7 +270,6 @@ public class HDFSFileInterpreter extends FileInterpreter {
return ret;
}
-
@Override
public List<InterpreterCompletion> completion(String buf, int cursor,
InterpreterContext interpreterContext) {
@@ -266,17 +284,22 @@ public class HDFSFileInterpreter extends FileInterpreter {
//part of a command == no spaces
if (buf.split(" ").length == 1){
- if ("cd".contains(buf)) suggestions.add(new InterpreterCompletion("cd", "cd",
- CompletionType.command.name()));
- if ("ls".contains(buf)) suggestions.add(new InterpreterCompletion("ls", "ls",
- CompletionType.command.name()));
- if ("pwd".contains(buf)) suggestions.add(new InterpreterCompletion("pwd", "pwd",
- CompletionType.command.name()));
+ if ("cd".contains(buf)) {
+ suggestions.add(new InterpreterCompletion("cd", "cd",
+ CompletionType.command.name()));
+ }
+ if ("ls".contains(buf)) {
+ suggestions.add(new InterpreterCompletion("ls", "ls",
+ CompletionType.command.name()));
+ }
+ if ("pwd".contains(buf)) {
+ suggestions.add(new InterpreterCompletion("pwd", "pwd",
+ CompletionType.command.name()));
+ }
return suggestions;
}
-
// last word will contain the path we're working with.
String lastToken = buf.substring(buf.lastIndexOf(" ") + 1);
if (lastToken.startsWith("-")) { //flag not path
@@ -298,12 +321,10 @@ public class HDFSFileInterpreter extends FileInterpreter {
AllFileStatus allFiles = gson.fromJson(fileStatusString, AllFileStatus.class);
if (allFiles != null &&
- allFiles.FileStatuses != null &&
- allFiles.FileStatuses.FileStatus != null)
- {
- for (OneFileStatus fs : allFiles.FileStatuses.FileStatus) {
+ allFiles.fileStatuses != null &&
+ allFiles.fileStatuses.fileStatus != null) {
+ for (OneFileStatus fs : allFiles.fileStatuses.fileStatus) {
if (fs.pathSuffix.contains(unfinished)) {
-
//only suggest the text after the last .
String beforeLastPeriod = unfinished.substring(0, unfinished.lastIndexOf('.') + 1);
//beforeLastPeriod should be the start of fs.pathSuffix, so take the end of it.
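The field renames in this file (FileStatus to fileStatus, FileStatuses to fileStatuses) satisfy Checkstyle's member-naming convention while the JSON keys returned by WebHDFS stay capitalized; Gson's @SerializedName annotation keeps deserialization working across the rename. A minimal sketch of that mechanism, with the sample JSON trimmed to a single field rather than a full WebHDFS payload:

import com.google.gson.Gson;
import com.google.gson.annotations.SerializedName;

// Minimal sketch: @SerializedName binds a checkstyle-friendly field name
// to the capitalized key that WebHDFS actually returns.
public class SerializedNameSketch {
  static class OneFileStatus {
    String type;
  }

  static class SingleFileStatus {
    @SerializedName("FileStatus")
    OneFileStatus fileStatus;
  }

  public static void main(String[] args) {
    String json = "{\"FileStatus\":{\"type\":\"DIRECTORY\"}}";
    SingleFileStatus sfs = new Gson().fromJson(json, SingleFileStatus.class);
    System.out.println(sfs.fileStatus.type);  // prints DIRECTORY
  }
}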
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/e89f1027/file/src/test/java/org/apache/zeppelin/file/HDFSFileInterpreterTest.java
----------------------------------------------------------------------
diff --git a/file/src/test/java/org/apache/zeppelin/file/HDFSFileInterpreterTest.java b/file/src/test/java/org/apache/zeppelin/file/HDFSFileInterpreterTest.java
index adc9bd6..aa69886 100644
--- a/file/src/test/java/org/apache/zeppelin/file/HDFSFileInterpreterTest.java
+++ b/file/src/test/java/org/apache/zeppelin/file/HDFSFileInterpreterTest.java
@@ -18,13 +18,12 @@
package org.apache.zeppelin.file;
+import static org.junit.Assert.assertNull;
+
import com.google.gson.Gson;
+
import junit.framework.TestCase;
-import static org.junit.Assert.*;
-import org.apache.zeppelin.completer.CompletionType;
-import org.apache.zeppelin.interpreter.InterpreterResult;
-import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
import org.junit.Test;
import org.slf4j.Logger;
@@ -32,223 +31,260 @@ import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
-import java.lang.Override;
-import java.lang.String;
+import org.apache.zeppelin.completer.CompletionType;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
/**
- * Tests Interpreter by running pre-determined commands against mock file system
- *
+ * Tests Interpreter by running pre-determined commands against mock file system.
*/
public class HDFSFileInterpreterTest extends TestCase {
+ @Test
+ public void testMaxLength() {
+ HDFSFileInterpreter t = new MockHDFSFileInterpreter(new Properties());
+ t.open();
+ InterpreterResult result = t.interpret("ls -l /", null);
+ String lineSeparator = "\n";
+ int fileStatusLength = MockFileSystem.FILE_STATUSES.split(lineSeparator).length;
+ assertEquals(result.message().get(0).getData().split(lineSeparator).length, fileStatusLength);
+ t.close();
+
+ Properties properties = new Properties();
+ final int maxLength = fileStatusLength - 2;
+ properties.setProperty("hdfs.maxlength", String.valueOf(maxLength));
+ HDFSFileInterpreter t1 = new MockHDFSFileInterpreter(properties);
+ t1.open();
+ InterpreterResult result1 = t1.interpret("ls -l /", null);
+ assertEquals(result1.message().get(0).getData().split(lineSeparator).length, maxLength);
+ t1.close();
+ }
- @Test
- public void testMaxLength() {
-
- HDFSFileInterpreter t = new MockHDFSFileInterpreter(new Properties());
- t.open();
- InterpreterResult result = t.interpret("ls -l /", null);
- String lineSeparator = "\n";
- int fileStatusLength = MockFileSystem.fileStatuses.split(lineSeparator).length;
- assertEquals(result.message().get(0).getData().split(lineSeparator).length, fileStatusLength);
- t.close();
-
- Properties properties = new Properties();
- final int maxLength = fileStatusLength - 2;
- properties.setProperty("hdfs.maxlength", String.valueOf(maxLength));
- HDFSFileInterpreter t1 = new MockHDFSFileInterpreter(properties);
- t1.open();
- InterpreterResult result1 = t1.interpret("ls -l /", null);
- assertEquals(result1.message().get(0).getData().split(lineSeparator).length, maxLength);
- t1.close();
- }
-
- @Test
- public void test() {
- HDFSFileInterpreter t = new MockHDFSFileInterpreter(new Properties());
- t.open();
-
- // We have info for /, /user, /tmp, /mr-history/done
-
- // Ensure
- // 1. ls -l works
- // 2. paths (. and ..) are correctly handled
- // 3. flags and arguments to commands are correctly handled
+ @Test
+ public void test() {
+ HDFSFileInterpreter t = new MockHDFSFileInterpreter(new Properties());
+ t.open();
- InterpreterResult result1 = t.interpret("ls -l /", null);
- assertEquals(result1.message().get(0).getType(), InterpreterResult.Type.TEXT);
+ // We have info for /, /user, /tmp, /mr-history/done
- InterpreterResult result2 = t.interpret("ls -l /./user/..", null);
- assertEquals(result2.message().get(0).getType(), InterpreterResult.Type.TEXT);
+ // Ensure
+ // 1. ls -l works
+ // 2. paths (. and ..) are correctly handled
+ // 3. flags and arguments to commands are correctly handled
+ InterpreterResult result1 = t.interpret("ls -l /", null);
+ assertEquals(result1.message().get(0).getType(), InterpreterResult.Type.TEXT);
- assertEquals(result1.message().get(0).getData(), result2.message().get(0).getData());
+ InterpreterResult result2 = t.interpret("ls -l /./user/..", null);
+ assertEquals(result2.message().get(0).getType(), InterpreterResult.Type.TEXT);
- // Ensure you can do cd and after that the ls uses current directory correctly
+ assertEquals(result1.message().get(0).getData(), result2.message().get(0).getData());
- InterpreterResult result3 = t.interpret("cd user", null);
- assertEquals(result3.message().get(0).getType(), InterpreterResult.Type.TEXT);
- assertEquals(result3.message().get(0).getData(), "OK");
+ // Ensure you can do cd and after that the ls uses current directory correctly
+ InterpreterResult result3 = t.interpret("cd user", null);
+ assertEquals(result3.message().get(0).getType(), InterpreterResult.Type.TEXT);
+ assertEquals(result3.message().get(0).getData(), "OK");
- InterpreterResult result4 = t.interpret("ls", null);
- assertEquals(result4.message().get(0).getType(), InterpreterResult.Type.TEXT);
+ InterpreterResult result4 = t.interpret("ls", null);
+ assertEquals(result4.message().get(0).getType(), InterpreterResult.Type.TEXT);
- InterpreterResult result5 = t.interpret("ls /user", null);
- assertEquals(result5.message().get(0).getType(), InterpreterResult.Type.TEXT);
+ InterpreterResult result5 = t.interpret("ls /user", null);
+ assertEquals(result5.message().get(0).getType(), InterpreterResult.Type.TEXT);
- assertEquals(result4.message().get(0).getData(), result5.message().get(0).getData());
+ assertEquals(result4.message().get(0).getData(), result5.message().get(0).getData());
- // Ensure pwd works correctly
+ // Ensure pwd works correctly
+ InterpreterResult result6 = t.interpret("pwd", null);
+ assertEquals(result6.message().get(0).getType(), InterpreterResult.Type.TEXT);
+ assertEquals(result6.message().get(0).getData(), "/user");
- InterpreterResult result6 = t.interpret("pwd", null);
- assertEquals(result6.message().get(0).getType(), InterpreterResult.Type.TEXT);
- assertEquals(result6.message().get(0).getData(), "/user");
+ // Move a couple of levels and check we're in the right place
+ InterpreterResult result7 = t.interpret("cd ../mr-history/done", null);
+ assertEquals(result7.message().get(0).getType(), InterpreterResult.Type.TEXT);
+ assertEquals(result7.message().get(0).getData(), "OK");
- // Move a couple of levels and check we're in the right place
+ InterpreterResult result8 = t.interpret("ls -l ", null);
+ assertEquals(result8.message().get(0).getType(), InterpreterResult.Type.TEXT);
- InterpreterResult result7 = t.interpret("cd ../mr-history/done", null);
- assertEquals(result7.message().get(0).getType(), InterpreterResult.Type.TEXT);
- assertEquals(result7.message().get(0).getData(), "OK");
+ InterpreterResult result9 = t.interpret("ls -l /mr-history/done", null);
+ assertEquals(result9.message().get(0).getType(), InterpreterResult.Type.TEXT);
- InterpreterResult result8 = t.interpret("ls -l ", null);
- assertEquals(result8.message().get(0).getType(), InterpreterResult.Type.TEXT);
+ assertEquals(result8.message().get(0).getData(), result9.message().get(0).getData());
- InterpreterResult result9 = t.interpret("ls -l /mr-history/done", null);
- assertEquals(result9.message().get(0).getType(), InterpreterResult.Type.TEXT);
+ InterpreterResult result10 = t.interpret("cd ../..", null);
+ assertEquals(result10.message().get(0).getType(), InterpreterResult.Type.TEXT);
+ assertEquals(result7.message().get(0).getData(), "OK");
- assertEquals(result8.message().get(0).getData(), result9.message().get(0).getData());
+ InterpreterResult result11 = t.interpret("ls -l ", null);
+ assertEquals(result11.message().get(0).getType(), InterpreterResult.Type.TEXT);
- InterpreterResult result10 = t.interpret("cd ../..", null);
- assertEquals(result10.message().get(0).getType(), InterpreterResult.Type.TEXT);
- assertEquals(result7.message().get(0).getData(), "OK");
+ // we should be back to first result after all this navigation
+ assertEquals(result1.message().get(0).getData(), result11.message().get(0).getData());
- InterpreterResult result11 = t.interpret("ls -l ", null);
- assertEquals(result11.message().get(0).getType(), InterpreterResult.Type.TEXT);
+ // auto completion test
+ List expectedResultOne = Arrays.asList(
+ new InterpreterCompletion("ls", "ls", CompletionType.command.name()));
+ List expectedResultTwo = Arrays.asList(
+ new InterpreterCompletion("pwd", "pwd", CompletionType.command.name()));
+ List<InterpreterCompletion> resultOne = t.completion("l", 0, null);
+ List<InterpreterCompletion> resultTwo = t.completion("p", 0, null);
- // we should be back to first result after all this navigation
- assertEquals(result1.message().get(0).getData(), result11.message().get(0).getData());
+ assertEquals(expectedResultOne, resultOne);
+ assertEquals(expectedResultTwo, resultTwo);
- // auto completion test
- List expectedResultOne = Arrays.asList(
- new InterpreterCompletion("ls", "ls", CompletionType.command.name()));
- List expectedResultTwo = Arrays.asList(
- new InterpreterCompletion("pwd", "pwd", CompletionType.command.name()));
- List<InterpreterCompletion> resultOne = t.completion("l", 0, null);
- List<InterpreterCompletion> resultTwo = t.completion("p", 0, null);
+ t.close();
+ }
+}
- assertEquals(expectedResultOne, resultOne);
- assertEquals(expectedResultTwo, resultTwo);
+/**
+ * Store command results from curl against a real file system.
+ */
+class MockFileSystem {
+ HashMap<String, String> mfs = new HashMap<>();
+ static final String FILE_STATUSES =
+ "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16389," +
+ "\"group\":\"hadoop\",\"length\":0,\"modificationTime\":1438548219672," +
+ "\"owner\":\"yarn\",\"pathSuffix\":\"app-logs\",\"permission\":\"777\"," +
+ "\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
+ "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16395," +
+ "\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438548030045," +
+ "\"owner\":\"hdfs\",\"pathSuffix\":\"hdp\",\"permission\":\"755\"," +
+ "\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
+ "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16390," +
+ "\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438547985336," +
+ "\"owner\":\"mapred\",\"pathSuffix\":\"mapred\",\"permission\":\"755\"," +
+ "\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
+ "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":2,\"fileId\":16392," +
+ "\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438547985346," +
+ "\"owner\":\"hdfs\",\"pathSuffix\":\"mr-history\",\"permission\":\"755\"," +
+ "\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
+ "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16400," +
+ "\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438548089725," +
+ "\"owner\":\"hdfs\",\"pathSuffix\":\"system\",\"permission\":\"755\"," +
+ "\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
+ "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16386," +
+ "\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438548150089," +
+ "\"owner\":\"hdfs\",\"pathSuffix\":\"tmp\",\"permission\":\"777\"," +
+ "\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
+ "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16387," +
+ "\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438547921792," +
+ "\"owner\":\"hdfs\",\"pathSuffix\":\"user\",\"permission\":\"755\"," +
+ "\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"}\n";
+
+ void addListStatusData() {
+ mfs.put("/?op=LISTSTATUS",
+ "{\"FileStatuses\":{\"FileStatus\":[\n" + FILE_STATUSES +
+ "]}}"
+ );
+ mfs.put("/user?op=LISTSTATUS", "{\"FileStatuses\":{\"FileStatus\":[\n" +
+ " {\"accessTime\":0,\"blockSize\":0,\"childrenNum\":4,\"fileId\":16388," +
+ "\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1441253161263," +
+ "\"owner\":\"ambari-qa\",\"pathSuffix\":\"ambari-qa\",\"permission\":\"770\"," +
+ "\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"}\n" +
+ " ]}}"
+ );
+ mfs.put("/tmp?op=LISTSTATUS",
+ "{\"FileStatuses\":{\"FileStatus\":[\n" +
+ " {\"accessTime\":1441253097489,\"blockSize\":134217728,\"childrenNum\":0," +
+ "\"fileId\":16400,\"group\":\"hdfs\",\"length\":1645," +
+ "\"modificationTime\":1441253097517,\"owner\":\"hdfs\"," +
+ "\"pathSuffix\":\"ida8c06540_date040315\",\"permission\":\"755\"," +
+ "\"replication\":3,\"storagePolicy\":0,\"type\":\"FILE\"}\n" +
+ " ]}}"
+ );
+ mfs.put("/mr-history/done?op=LISTSTATUS",
+ "{\"FileStatuses\":{\"FileStatus\":[\n" +
+ "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16433," +
+ "\"group\":\"hadoop\",\"length\":0,\"modificationTime\":1441253197481," +
+ "\"owner\":\"mapred\",\"pathSuffix\":\"2015\",\"permission\":\"770\"," +
+ "\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"}\n" +
+ "]}}"
+ );
+ }
- t.close();
- }
+ void addGetFileStatusData() {
+ mfs.put("/?op=GETFILESTATUS",
+ "{\"FileStatus\":{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":7,\"fileId\":16385," +
+ "\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438548089725," +
+ "\"owner\":\"hdfs\",\"pathSuffix\":\"\",\"permission\":\"755\"," +
+ "\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"}}");
+ mfs.put("/user?op=GETFILESTATUS",
+ "{\"FileStatus\":{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16387," +
+ "\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1441253043188," +
+ "\"owner\":\"hdfs\",\"pathSuffix\":\"\",\"permission\":\"755\"," +
+ "\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"}}");
+ mfs.put("/tmp?op=GETFILESTATUS",
+ "{\"FileStatus\":{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16386," +
+ "\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1441253097489," +
+ "\"owner\":\"hdfs\",\"pathSuffix\":\"\",\"permission\":\"777\"," +
+ "\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"}}");
+ mfs.put("/mr-history/done?op=GETFILESTATUS",
+ "{\"FileStatus\":{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16393," +
+ "\"group\":\"hadoop\",\"length\":0,\"modificationTime\":1441253197480," +
+ "\"owner\":\"mapred\",\"pathSuffix\":\"\",\"permission\":\"777\"," +
+ "\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"}}");
}
- /**
- * Store command results from curl against a real file system
- */
- class MockFileSystem {
- HashMap<String, String> mfs = new HashMap<>();
- static final String fileStatuses =
- "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16389,\"group\":\"hadoop\",\"length\":0,\"modificationTime\":1438548219672,\"owner\":\"yarn\",\"pathSuffix\":\"app-logs\",\"permission\":\"777\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
- "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16395,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438548030045,\"owner\":\"hdfs\",\"pathSuffix\":\"hdp\",\"permission\":\"755\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
- "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16390,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438547985336,\"owner\":\"mapred\",\"pathSuffix\":\"mapred\",\"permission\":\"755\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
- "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":2,\"fileId\":16392,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438547985346,\"owner\":\"hdfs\",\"pathSuffix\":\"mr-history\",\"permission\":\"755\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
- "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16400,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438548089725,\"owner\":\"hdfs\",\"pathSuffix\":\"system\",\"permission\":\"755\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
- "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16386,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438548150089,\"owner\":\"hdfs\",\"pathSuffix\":\"tmp\",\"permission\":\"777\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"},\n" +
- "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16387,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438547921792,\"owner\":\"hdfs\",\"pathSuffix\":\"user\",\"permission\":\"755\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"}\n";
- void addListStatusData() {
- mfs.put("/?op=LISTSTATUS",
- "{\"FileStatuses\":{\"FileStatus\":[\n" + fileStatuses +
- "]}}"
- );
- mfs.put("/user?op=LISTSTATUS",
- "{\"FileStatuses\":{\"FileStatus\":[\n" +
- " {\"accessTime\":0,\"blockSize\":0,\"childrenNum\":4,\"fileId\":16388,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1441253161263,\"owner\":\"ambari-qa\",\"pathSuffix\":\"ambari-qa\",\"permission\":\"770\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"}\n" +
- " ]}}"
- );
- mfs.put("/tmp?op=LISTSTATUS",
- "{\"FileStatuses\":{\"FileStatus\":[\n" +
- " {\"accessTime\":1441253097489,\"blockSize\":134217728,\"childrenNum\":0,\"fileId\":16400,\"group\":\"hdfs\",\"length\":1645,\"modificationTime\":1441253097517,\"owner\":\"hdfs\",\"pathSuffix\":\"ida8c06540_date040315\",\"permission\":\"755\",\"replication\":3,\"storagePolicy\":0,\"type\":\"FILE\"}\n" +
- " ]}}"
- );
- mfs.put("/mr-history/done?op=LISTSTATUS",
- "{\"FileStatuses\":{\"FileStatus\":[\n" +
- "{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16433,\"group\":\"hadoop\",\"length\":0,\"modificationTime\":1441253197481,\"owner\":\"mapred\",\"pathSuffix\":\"2015\",\"permission\":\"770\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"}\n" +
- "]}}"
- );
- }
- void addGetFileStatusData() {
- mfs.put("/?op=GETFILESTATUS",
- "{\"FileStatus\":{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":7,\"fileId\":16385,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1438548089725,\"owner\":\"hdfs\",\"pathSuffix\":\"\",\"permission\":\"755\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"}}");
- mfs.put("/user?op=GETFILESTATUS",
- "{\"FileStatus\":{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16387,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1441253043188,\"owner\":\"hdfs\",\"pathSuffix\":\"\",\"permission\":\"755\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"}}");
- mfs.put("/tmp?op=GETFILESTATUS",
- "{\"FileStatus\":{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16386,\"group\":\"hdfs\",\"length\":0,\"modificationTime\":1441253097489,\"owner\":\"hdfs\",\"pathSuffix\":\"\",\"permission\":\"777\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"}}");
- mfs.put("/mr-history/done?op=GETFILESTATUS",
- "{\"FileStatus\":{\"accessTime\":0,\"blockSize\":0,\"childrenNum\":1,\"fileId\":16393,\"group\":\"hadoop\",\"length\":0,\"modificationTime\":1441253197480,\"owner\":\"mapred\",\"pathSuffix\":\"\",\"permission\":\"777\",\"replication\":0,\"storagePolicy\":0,\"type\":\"DIRECTORY\"}}");
- }
- public void addMockData(HDFSCommand.Op op) {
- if (op.op.equals("LISTSTATUS")) {
- addListStatusData();
- } else if (op.op.equals("GETFILESTATUS")) {
- addGetFileStatusData();
- }
- // do nothing
- }
- public String get(String key) {
- return mfs.get(key);
+ public void addMockData(HDFSCommand.Op op) {
+ if (op.op.equals("LISTSTATUS")) {
+ addListStatusData();
+ } else if (op.op.equals("GETFILESTATUS")) {
+ addGetFileStatusData();
}
+ // do nothing
}
- /**
- * Run commands against mock file system that simulates webhdfs responses
- */
- class MockHDFSCommand extends HDFSCommand {
- MockFileSystem fs = null;
-
- public MockHDFSCommand(String url, String user, Logger logger, int maxLength) {
- super(url, user, logger, maxLength);
- fs = new MockFileSystem();
- fs.addMockData(getFileStatus);
- fs.addMockData(listStatus);
- }
+ public String get(String key) {
+ return mfs.get(key);
+ }
+}
- public MockHDFSCommand(String url, String user, Logger logger) {
- this(url, user, logger, 1000);
- }
+/**
+ * Run commands against mock file system that simulates webhdfs responses.
+ */
+class MockHDFSCommand extends HDFSCommand {
+ MockFileSystem fs = null;
+
+ MockHDFSCommand(String url, String user, Logger logger, int maxLength) {
+ super(url, user, logger, maxLength);
+ fs = new MockFileSystem();
+ fs.addMockData(getFileStatus);
+ fs.addMockData(listStatus);
+ }
- @Override
- public String runCommand(Op op, String path, Arg[] args) throws Exception {
+ MockHDFSCommand(String url, String user, Logger logger) {
+ this(url, user, logger, 1000);
+ }
- String error = checkArgs(op, path, args);
- assertNull(error);
+ @Override
+ public String runCommand(Op op, String path, Arg[] args) throws Exception {
+ String error = checkArgs(op, path, args);
+ assertNull(error);
- String c = path + "?op=" + op.op;
+ String c = path + "?op=" + op.op;
- if (args != null) {
- for (Arg a : args) {
- c += "&" + a.key + "=" + a.value;
- }
+ if (args != null) {
+ for (Arg a : args) {
+ c += "&" + a.key + "=" + a.value;
}
- return fs.get(c);
}
+ return fs.get(c);
}
+}
- /**
- * Mock Interpreter - uses Mock HDFS command
- */
- class MockHDFSFileInterpreter extends HDFSFileInterpreter {
-
- @Override
- public void prepare() {
- // Run commands against mock File System instead of WebHDFS
- int i = Integer.parseInt(getProperty(HDFS_MAXLENGTH) == null ? "1000"
- : getProperty(HDFS_MAXLENGTH));
- cmd = new MockHDFSCommand("", "", logger, i);
- gson = new Gson();
- }
-
- public MockHDFSFileInterpreter(Properties property) {
- super(property);
- }
+/**
+ * Mock Interpreter - uses Mock HDFS command.
+ */
+class MockHDFSFileInterpreter extends HDFSFileInterpreter {
+ @Override
+ public void prepare() {
+ // Run commands against mock File System instead of WebHDFS
+ int i = Integer.parseInt(getProperty(HDFS_MAXLENGTH) == null ? "1000"
+ : getProperty(HDFS_MAXLENGTH));
+ cmd = new MockHDFSCommand("", "", logger, i);
+ gson = new Gson();
+ }
-}
\ No newline at end of file
+ MockHDFSFileInterpreter(Properties property) {
+ super(property);
+ }
+}
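Similarly, the test's fileStatuses constant becomes FILE_STATUSES to satisfy Checkstyle's ConstantName rule: static final fields use UPPER_SNAKE_CASE, while instance members stay lowerCamelCase. A one-class illustration (hypothetical names, not part of the commit):

// Hypothetical illustration of the ConstantName rule behind the
// fileStatuses -> FILE_STATUSES rename in MockFileSystem.
public class ConstantNameSketch {
  static final String FILE_STATUSES = "{...}";  // static final: UPPER_SNAKE_CASE
  String fileStatus = "";                       // instance field: lowerCamelCase

  public static void main(String[] args) {
    System.out.println(FILE_STATUSES);
  }
}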