You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@bigtop.apache.org by rv...@apache.org on 2017/03/23 17:28:20 UTC
[49/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime
tests option into the battery of smoke tests
BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests
IDEA code reformatting
Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/a05d3813
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/a05d3813
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/a05d3813
Branch: refs/heads/master
Commit: a05d3813f67979f74c0494fb118f98a0264266dc
Parents: 0f51fb3
Author: Roman Shaposhnik <rv...@apache.org>
Authored: Wed Mar 22 09:49:53 2017 -0700
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:17 2017 -0700
----------------------------------------------------------------------
.../odpi/specs/runtime/hadoop/ApiExaminer.java | 732 ++++++++---------
.../org/odpi/specs/runtime/hive/HCatalogMR.java | 173 ++--
.../org/odpi/specs/runtime/hive/HiveHelper.java | 158 ++--
.../odpi/specs/runtime/hive/JdbcConnector.java | 78 +-
.../odpi/specs/runtime/hive/TestBeeline.java | 364 +++++----
.../org/odpi/specs/runtime/hive/TestCLI.java | 387 ++++-----
.../odpi/specs/runtime/hive/TestHCatalog.java | 202 ++---
.../org/odpi/specs/runtime/hive/TestJdbc.java | 814 ++++++++++---------
.../org/odpi/specs/runtime/hive/TestSql.java | 524 ++++++------
.../org/odpi/specs/runtime/hive/TestThrift.java | 396 ++++-----
10 files changed, 1941 insertions(+), 1887 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/bigtop/blob/a05d3813/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
index a8febdb..77db1b5 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
@@ -48,442 +48,444 @@ import java.util.regex.Pattern;
*/
public class ApiExaminer {
- private static final Log LOG = LogFactory.getLog(ApiExaminer.class.getName());
-
- static private Set<String> unloadableClasses;
-
- private List<String> errors;
- private List<String> warnings;
-
- static {
- unloadableClasses = new HashSet<>();
- unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsMapping");
- unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping");
- unloadableClasses.add("org.apache.hadoop.io.compress.lz4.Lz4Compressor");
- unloadableClasses.add("org.apache.hadoop.record.compiler.ant.RccTask");
-
- }
-
- public static void main(String[] args) {
- Options options = new Options();
-
- options.addOption("c", "compare", true,
- "Compare against a spec, argument is the json file containing spec");
- options.addOption("h", "help", false, "You're looking at it");
- options.addOption("j", "jar", true, "Jar to examine");
- options.addOption("p", "prepare-spec", true,
- "Prepare the spec, argument is the directory to write the spec to");
-
- try {
- CommandLine cli = new GnuParser().parse(options, args);
-
- if (cli.hasOption('h')) {
- usage(options);
- return;
- }
-
- if ((!cli.hasOption('c') && !cli.hasOption('p')) ||
- (cli.hasOption('c') && cli.hasOption('p'))) {
- System.err.println("You must choose either -c or -p");
- usage(options);
- return;
- }
-
- if (!cli.hasOption('j')) {
- System.err.println("You must specify the jar to prepare or compare");
- usage(options);
- return;
- }
-
- String jar = cli.getOptionValue('j');
- ApiExaminer examiner = new ApiExaminer();
-
- if (cli.hasOption('c')) {
- examiner.compareAgainstStandard(cli.getOptionValue('c'), jar);
- } else if (cli.hasOption('p')) {
- examiner.prepareExpected(jar, cli.getOptionValue('p'));
- }
- } catch (Exception e) {
- System.err.println("Received exception while processing");
- e.printStackTrace();
- }
- }
-
- private static void usage(Options options) {
- HelpFormatter help = new HelpFormatter();
- help.printHelp("api-examiner", options);
-
- }
-
- private ApiExaminer() {
- }
-
- private void prepareExpected(String jarFile, String outputDir) throws IOException,
- ClassNotFoundException {
- JarInfo jarInfo = new JarInfo(jarFile, this);
- jarInfo.dumpToFile(new File(outputDir));
- }
-
- private void compareAgainstStandard(String json, String jarFile) throws IOException,
- ClassNotFoundException {
- errors = new ArrayList<>();
- warnings = new ArrayList<>();
- JarInfo underTest = new JarInfo(jarFile, this);
- JarInfo standard = jarInfoFromFile(new File(json));
- standard.compareAndReport(underTest);
-
- if (errors.size() > 0) {
- System.err.println("Found " + errors.size() + " incompatibilities:");
- for (String error : errors) {
- System.err.println(error);
- }
- }
+ private static final Log LOG = LogFactory.getLog(ApiExaminer.class.getName());
+
+ static private Set<String> unloadableClasses;
+
+ private List<String> errors;
+ private List<String> warnings;
+
+ static {
+ unloadableClasses = new HashSet<>();
+ unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsMapping");
+ unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping");
+ unloadableClasses.add("org.apache.hadoop.io.compress.lz4.Lz4Compressor");
+ unloadableClasses.add("org.apache.hadoop.record.compiler.ant.RccTask");
- if (warnings.size() > 0) {
- System.err.println("Found " + warnings.size() + " possible issues: ");
- for (String warning : warnings) {
- System.err.println(warning);
- }
}
+ public static void main(String[] args) {
+ Options options = new Options();
+
+ options.addOption("c", "compare", true,
+ "Compare against a spec, argument is the json file containing spec");
+ options.addOption("h", "help", false, "You're looking at it");
+ options.addOption("j", "jar", true, "Jar to examine");
+ options.addOption("p", "prepare-spec", true,
+ "Prepare the spec, argument is the directory to write the spec to");
- }
+ try {
+ CommandLine cli = new GnuParser().parse(options, args);
- private JarInfo jarInfoFromFile(File inputFile) throws IOException {
- ObjectMapper mapper = new ObjectMapper();
- JarInfo jarInfo = mapper.readValue(inputFile, JarInfo.class);
- jarInfo.patchUpClassBackPointers(this);
- return jarInfo;
- }
+ if (cli.hasOption('h')) {
+ usage(options);
+ return;
+ }
- private static class JarInfo {
- String name;
- String version;
- ApiExaminer container;
- Map<String, ClassInfo> classes;
+ if ((!cli.hasOption('c') && !cli.hasOption('p')) ||
+ (cli.hasOption('c') && cli.hasOption('p'))) {
+ System.err.println("You must choose either -c or -p");
+ usage(options);
+ return;
+ }
- // For use by Jackson
- public JarInfo() {
+ if (!cli.hasOption('j')) {
+ System.err.println("You must specify the jar to prepare or compare");
+ usage(options);
+ return;
+ }
- }
+ String jar = cli.getOptionValue('j');
+ ApiExaminer examiner = new ApiExaminer();
- JarInfo(String jarFile, ApiExaminer container) throws IOException, ClassNotFoundException {
- this.container = container;
- LOG.info("Processing jar " + jarFile);
- File f = new File(jarFile);
- Pattern pattern = Pattern.compile("(hadoop-[a-z\\-]+)-([0-9]\\.[0-9]\\.[0-9]).*");
- Matcher matcher = pattern.matcher(f.getName());
- if (!matcher.matches()) {
- String msg = "Unable to determine name and version from " + f.getName();
- LOG.error(msg);
- throw new RuntimeException(msg);
- }
- name = matcher.group(1);
- version = matcher.group(2);
- classes = new HashMap<>();
-
- JarFile jar = new JarFile(jarFile);
- Enumeration<JarEntry> entries = jar.entries();
- while (entries.hasMoreElements()) {
- String name = entries.nextElement().getName();
- if (name.endsWith(".class")) {
- name = name.substring(0, name.length() - 6);
- name = name.replace('/', '.');
- if (!unloadableClasses.contains(name)) {
- LOG.debug("Processing class " + name);
- Class<?> clazz = Class.forName(name);
- if (clazz.getAnnotation(InterfaceAudience.Public.class) != null &&
- clazz.getAnnotation(InterfaceStability.Stable.class) != null) {
- classes.put(name, new ClassInfo(this, clazz));
+ if (cli.hasOption('c')) {
+ examiner.compareAgainstStandard(cli.getOptionValue('c'), jar);
+ } else if (cli.hasOption('p')) {
+ examiner.prepareExpected(jar, cli.getOptionValue('p'));
}
- }
+ } catch (Exception e) {
+ System.err.println("Received exception while processing");
+ e.printStackTrace();
}
- }
}
- public String getName() {
- return name;
- }
+ private static void usage(Options options) {
+ HelpFormatter help = new HelpFormatter();
+ help.printHelp("api-examiner", options);
- public void setName(String name) {
- this.name = name;
}
- public String getVersion() {
- return version;
+ private ApiExaminer() {
}
- public void setVersion(String version) {
- this.version = version;
+ private void prepareExpected(String jarFile, String outputDir) throws IOException,
+ ClassNotFoundException {
+ JarInfo jarInfo = new JarInfo(jarFile, this);
+ jarInfo.dumpToFile(new File(outputDir));
}
- public Map<String, ClassInfo> getClasses() {
- return classes;
+ private void compareAgainstStandard(String json, String jarFile) throws IOException,
+ ClassNotFoundException {
+ errors = new ArrayList<>();
+ warnings = new ArrayList<>();
+ JarInfo underTest = new JarInfo(jarFile, this);
+ JarInfo standard = jarInfoFromFile(new File(json));
+ standard.compareAndReport(underTest);
+
+ if (errors.size() > 0) {
+ System.err.println("Found " + errors.size() + " incompatibilities:");
+ for (String error : errors) {
+ System.err.println(error);
+ }
+ }
+
+ if (warnings.size() > 0) {
+ System.err.println("Found " + warnings.size() + " possible issues: ");
+ for (String warning : warnings) {
+ System.err.println(warning);
+ }
+ }
+
+
}
- public void setClasses(Map<String, ClassInfo> classes) {
- this.classes = classes;
+ private JarInfo jarInfoFromFile(File inputFile) throws IOException {
+ ObjectMapper mapper = new ObjectMapper();
+ JarInfo jarInfo = mapper.readValue(inputFile, JarInfo.class);
+ jarInfo.patchUpClassBackPointers(this);
+ return jarInfo;
}
- void compareAndReport(JarInfo underTest) {
- Set<ClassInfo> underTestClasses = new HashSet<>(underTest.classes.values());
- for (ClassInfo classInfo : classes.values()) {
- if (underTestClasses.contains(classInfo)) {
- classInfo.compareAndReport(underTest.classes.get(classInfo.name));
- underTestClasses.remove(classInfo);
- } else {
- container.errors.add(underTest + " does not contain class " + classInfo);
+ private static class JarInfo {
+ String name;
+ String version;
+ ApiExaminer container;
+ Map<String, ClassInfo> classes;
+
+ // For use by Jackson
+ public JarInfo() {
+
}
- }
- if (underTestClasses.size() > 0) {
- for (ClassInfo extra : underTestClasses) {
- container.warnings.add(underTest + " contains extra class " + extra);
+ JarInfo(String jarFile, ApiExaminer container) throws IOException, ClassNotFoundException {
+ this.container = container;
+ LOG.info("Processing jar " + jarFile);
+ File f = new File(jarFile);
+ Pattern pattern = Pattern.compile("(hadoop-[a-z\\-]+)-([0-9]\\.[0-9]\\.[0-9]).*");
+ Matcher matcher = pattern.matcher(f.getName());
+ if (!matcher.matches()) {
+ String msg = "Unable to determine name and version from " + f.getName();
+ LOG.error(msg);
+ throw new RuntimeException(msg);
+ }
+ name = matcher.group(1);
+ version = matcher.group(2);
+ classes = new HashMap<>();
+
+ JarFile jar = new JarFile(jarFile);
+ Enumeration<JarEntry> entries = jar.entries();
+ while (entries.hasMoreElements()) {
+ String name = entries.nextElement().getName();
+ if (name.endsWith(".class")) {
+ name = name.substring(0, name.length() - 6);
+ name = name.replace('/', '.');
+ if (!unloadableClasses.contains(name)) {
+ LOG.debug("Processing class " + name);
+ Class<?> clazz = Class.forName(name);
+ if (clazz.getAnnotation(InterfaceAudience.Public.class) != null &&
+ clazz.getAnnotation(InterfaceStability.Stable.class) != null) {
+ classes.put(name, new ClassInfo(this, clazz));
+ }
+ }
+ }
+ }
}
- }
- }
- void dumpToFile(File outputDir) throws IOException {
- File output = new File(outputDir, name + "-" + version + "-api-report.json");
- ObjectMapper mapper = new ObjectMapper();
- mapper.writeValue(output, this);
- }
+ public String getName() {
+ return name;
+ }
- void patchUpClassBackPointers(ApiExaminer container) {
- this.container = container;
- for (ClassInfo classInfo : classes.values()) {
- classInfo.setJar(this);
- classInfo.patchUpBackMethodBackPointers();
- }
- }
+ public void setName(String name) {
+ this.name = name;
+ }
- @Override
- public boolean equals(Object other) {
- if (!(other instanceof JarInfo)) return false;
- JarInfo that = (JarInfo)other;
- return name.equals(that.name) && version.equals(that.version);
- }
+ public String getVersion() {
+ return version;
+ }
- @Override
- public String toString() {
- return name + "-" + version;
- }
- }
+ public void setVersion(String version) {
+ this.version = version;
+ }
- private static class ClassInfo {
- @JsonIgnore JarInfo jar;
- String name;
- Map<String, MethodInfo> methods;
+ public Map<String, ClassInfo> getClasses() {
+ return classes;
+ }
- // For use by Jackson
- public ClassInfo() {
+ public void setClasses(Map<String, ClassInfo> classes) {
+ this.classes = classes;
+ }
- }
+ void compareAndReport(JarInfo underTest) {
+ Set<ClassInfo> underTestClasses = new HashSet<>(underTest.classes.values());
+ for (ClassInfo classInfo : classes.values()) {
+ if (underTestClasses.contains(classInfo)) {
+ classInfo.compareAndReport(underTest.classes.get(classInfo.name));
+ underTestClasses.remove(classInfo);
+ } else {
+ container.errors.add(underTest + " does not contain class " + classInfo);
+ }
+ }
- ClassInfo(JarInfo jar, Class<?> clazz) {
- this.jar = jar;
- this.name = clazz.getName();
- methods = new HashMap<>();
+ if (underTestClasses.size() > 0) {
+ for (ClassInfo extra : underTestClasses) {
+ container.warnings.add(underTest + " contains extra class " + extra);
+ }
+ }
+ }
- for (Method method : clazz.getMethods()) {
- if (method.getDeclaringClass().equals(clazz)) {
- LOG.debug("Processing method " + method.getName());
- MethodInfo mi = new MethodInfo(this, method);
- methods.put(mi.toString(), mi);
+ void dumpToFile(File outputDir) throws IOException {
+ File output = new File(outputDir, name + "-" + version + "-api-report.json");
+ ObjectMapper mapper = new ObjectMapper();
+ mapper.writeValue(output, this);
}
- }
- }
- public JarInfo getJar() {
- return jar;
- }
+ void patchUpClassBackPointers(ApiExaminer container) {
+ this.container = container;
+ for (ClassInfo classInfo : classes.values()) {
+ classInfo.setJar(this);
+ classInfo.patchUpBackMethodBackPointers();
+ }
+ }
- public void setJar(JarInfo jar) {
- this.jar = jar;
- }
+ @Override
+ public boolean equals(Object other) {
+ if (!(other instanceof JarInfo)) return false;
+ JarInfo that = (JarInfo) other;
+ return name.equals(that.name) && version.equals(that.version);
+ }
- public String getName() {
- return name;
+ @Override
+ public String toString() {
+ return name + "-" + version;
+ }
}
- public void setName(String name) {
- this.name = name;
- }
+ private static class ClassInfo {
+ @JsonIgnore
+ JarInfo jar;
+ String name;
+ Map<String, MethodInfo> methods;
- public Map<String, MethodInfo> getMethods() {
- return methods;
- }
+ // For use by Jackson
+ public ClassInfo() {
- public void setMethods(Map<String, MethodInfo> methods) {
- this.methods = methods;
- }
+ }
- void compareAndReport(ClassInfo underTest) {
- // Make a copy so we can remove them as we match them, making it easy to find additional ones
- Set<MethodInfo> underTestMethods = new HashSet<>(underTest.methods.values());
- for (MethodInfo methodInfo : methods.values()) {
- if (underTestMethods.contains(methodInfo)) {
- methodInfo.compareAndReport(underTest.methods.get(methodInfo.toString()));
- underTestMethods.remove(methodInfo);
- } else {
- jar.container.errors.add(underTest + " does not contain method " + methodInfo);
+ ClassInfo(JarInfo jar, Class<?> clazz) {
+ this.jar = jar;
+ this.name = clazz.getName();
+ methods = new HashMap<>();
+
+ for (Method method : clazz.getMethods()) {
+ if (method.getDeclaringClass().equals(clazz)) {
+ LOG.debug("Processing method " + method.getName());
+ MethodInfo mi = new MethodInfo(this, method);
+ methods.put(mi.toString(), mi);
+ }
+ }
}
- }
- if (underTestMethods.size() > 0) {
- for (MethodInfo extra : underTestMethods) {
- jar.container.warnings.add(underTest + " contains extra method " + extra);
+ public JarInfo getJar() {
+ return jar;
}
- }
- }
- void patchUpBackMethodBackPointers() {
- for (MethodInfo methodInfo : methods.values()) methodInfo.setContainingClass(this);
- }
+ public void setJar(JarInfo jar) {
+ this.jar = jar;
+ }
- @Override
- public boolean equals(Object other) {
- if (!(other instanceof ClassInfo)) return false;
- ClassInfo that = (ClassInfo)other;
- return name.equals(that.name); // Classes can be compared just on names
- }
+ public String getName() {
+ return name;
+ }
- @Override
- public int hashCode() {
- return name.hashCode();
- }
+ public void setName(String name) {
+ this.name = name;
+ }
- @Override
- public String toString() {
- return jar + " " + name;
- }
- }
+ public Map<String, MethodInfo> getMethods() {
+ return methods;
+ }
- private static class MethodInfo {
- @JsonIgnore ClassInfo containingClass;
- String name;
- String returnType;
- List<String> args;
- Set<String> exceptions;
+ public void setMethods(Map<String, MethodInfo> methods) {
+ this.methods = methods;
+ }
- // For use by Jackson
- public MethodInfo() {
+ void compareAndReport(ClassInfo underTest) {
+ // Make a copy so we can remove them as we match them, making it easy to find additional ones
+ Set<MethodInfo> underTestMethods = new HashSet<>(underTest.methods.values());
+ for (MethodInfo methodInfo : methods.values()) {
+ if (underTestMethods.contains(methodInfo)) {
+ methodInfo.compareAndReport(underTest.methods.get(methodInfo.toString()));
+ underTestMethods.remove(methodInfo);
+ } else {
+ jar.container.errors.add(underTest + " does not contain method " + methodInfo);
+ }
+ }
- }
+ if (underTestMethods.size() > 0) {
+ for (MethodInfo extra : underTestMethods) {
+ jar.container.warnings.add(underTest + " contains extra method " + extra);
+ }
+ }
+ }
- MethodInfo(ClassInfo containingClass, Method method) {
- this.containingClass = containingClass;
- this.name = method.getName();
- args = new ArrayList<>();
- for (Class<?> argClass : method.getParameterTypes()) {
- args.add(argClass.getName());
- }
- returnType = method.getReturnType().getName();
- exceptions = new HashSet<>();
- for (Class<?> exception : method.getExceptionTypes()) {
- exceptions.add(exception.getName());
- }
- }
+ void patchUpBackMethodBackPointers() {
+ for (MethodInfo methodInfo : methods.values()) methodInfo.setContainingClass(this);
+ }
- public ClassInfo getContainingClass() {
- return containingClass;
- }
+ @Override
+ public boolean equals(Object other) {
+ if (!(other instanceof ClassInfo)) return false;
+ ClassInfo that = (ClassInfo) other;
+ return name.equals(that.name); // Classes can be compared just on names
+ }
- public void setContainingClass(ClassInfo containingClass) {
- this.containingClass = containingClass;
- }
+ @Override
+ public int hashCode() {
+ return name.hashCode();
+ }
- public String getName() {
- return name;
+ @Override
+ public String toString() {
+ return jar + " " + name;
+ }
}
- public void setName(String name) {
- this.name = name;
- }
+ private static class MethodInfo {
+ @JsonIgnore
+ ClassInfo containingClass;
+ String name;
+ String returnType;
+ List<String> args;
+ Set<String> exceptions;
- public String getReturnType() {
- return returnType;
- }
+ // For use by Jackson
+ public MethodInfo() {
- public void setReturnType(String returnType) {
- this.returnType = returnType;
- }
+ }
- public List<String> getArgs() {
- return args;
- }
+ MethodInfo(ClassInfo containingClass, Method method) {
+ this.containingClass = containingClass;
+ this.name = method.getName();
+ args = new ArrayList<>();
+ for (Class<?> argClass : method.getParameterTypes()) {
+ args.add(argClass.getName());
+ }
+ returnType = method.getReturnType().getName();
+ exceptions = new HashSet<>();
+ for (Class<?> exception : method.getExceptionTypes()) {
+ exceptions.add(exception.getName());
+ }
+ }
- public void setArgs(List<String> args) {
- this.args = args;
- }
+ public ClassInfo getContainingClass() {
+ return containingClass;
+ }
- public Set<String> getExceptions() {
- return exceptions;
- }
+ public void setContainingClass(ClassInfo containingClass) {
+ this.containingClass = containingClass;
+ }
- public void setExceptions(Set<String> exceptions) {
- this.exceptions = exceptions;
- }
+ public String getName() {
+ return name;
+ }
- void compareAndReport(MethodInfo underTest) {
- // Check to see if they've added or removed exceptions
- // Make a copy so I can remove them as I check them off and easily find any that have been
- // added.
- Set<String> underTestExceptions = new HashSet<>(underTest.exceptions);
- for (String exception : exceptions) {
- if (underTest.exceptions.contains(exception)) {
- underTestExceptions.remove(exception);
- } else {
- containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
- underTest.containingClass + "." + name + " removes exception " + exception);
- }
- }
- if (underTestExceptions.size() > 0) {
- for (String underTestException : underTest.exceptions) {
- containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
- underTest.containingClass + "." + name + " adds exception " + underTestException);
- }
- }
- }
+ public void setName(String name) {
+ this.name = name;
+ }
- @Override
- public boolean equals(Object other) {
- if (!(other instanceof MethodInfo)) return false;
- MethodInfo that = (MethodInfo)other;
+ public String getReturnType() {
+ return returnType;
+ }
- return containingClass.equals(that.containingClass) && name.equals(that.name) &&
- returnType.equals(that.returnType) && args.equals(that.args);
- }
+ public void setReturnType(String returnType) {
+ this.returnType = returnType;
+ }
- @Override
- public int hashCode() {
- return ((containingClass.hashCode() * 31 + name.hashCode()) * 31 + returnType.hashCode()) * 31 +
- args.hashCode();
- }
+ public List<String> getArgs() {
+ return args;
+ }
+
+ public void setArgs(List<String> args) {
+ this.args = args;
+ }
- @Override
- public String toString() {
- StringBuilder buf = new StringBuilder(returnType)
- .append(" ")
- .append(name)
- .append('(');
- boolean first = true;
- for (String arg : args) {
- if (first) first = false;
- else buf.append(", ");
- buf.append(arg);
- }
- buf.append(")");
- if (exceptions.size() > 0) {
- buf.append(" throws ");
- first = true;
- for (String exception : exceptions) {
- if (first) first = false;
- else buf.append(", ");
- buf.append(exception);
- }
- }
- return buf.toString();
+ public Set<String> getExceptions() {
+ return exceptions;
+ }
+
+ public void setExceptions(Set<String> exceptions) {
+ this.exceptions = exceptions;
+ }
+
+ void compareAndReport(MethodInfo underTest) {
+ // Check to see if they've added or removed exceptions
+ // Make a copy so I can remove them as I check them off and easily find any that have been
+ // added.
+ Set<String> underTestExceptions = new HashSet<>(underTest.exceptions);
+ for (String exception : exceptions) {
+ if (underTest.exceptions.contains(exception)) {
+ underTestExceptions.remove(exception);
+ } else {
+ containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
+ underTest.containingClass + "." + name + " removes exception " + exception);
+ }
+ }
+ if (underTestExceptions.size() > 0) {
+ for (String underTestException : underTest.exceptions) {
+ containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
+ underTest.containingClass + "." + name + " adds exception " + underTestException);
+ }
+ }
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (!(other instanceof MethodInfo)) return false;
+ MethodInfo that = (MethodInfo) other;
+
+ return containingClass.equals(that.containingClass) && name.equals(that.name) &&
+ returnType.equals(that.returnType) && args.equals(that.args);
+ }
+
+ @Override
+ public int hashCode() {
+ return ((containingClass.hashCode() * 31 + name.hashCode()) * 31 + returnType.hashCode()) * 31 +
+ args.hashCode();
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder buf = new StringBuilder(returnType)
+ .append(" ")
+ .append(name)
+ .append('(');
+ boolean first = true;
+ for (String arg : args) {
+ if (first) first = false;
+ else buf.append(", ");
+ buf.append(arg);
+ }
+ buf.append(")");
+ if (exceptions.size() > 0) {
+ buf.append(" throws ");
+ first = true;
+ for (String exception : exceptions) {
+ if (first) first = false;
+ else buf.append(", ");
+ buf.append(exception);
+ }
+ }
+ return buf.toString();
+ }
}
- }
}
http://git-wip-us.apache.org/repos/asf/bigtop/blob/a05d3813/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
index 6456cf2..ccc15eb 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
@@ -41,97 +41,98 @@ import java.net.URI;
import java.util.StringTokenizer;
public class HCatalogMR extends Configured implements Tool {
- private final static String INPUT_SCHEMA = "bigtop.test.hcat.schema.input";
- private final static String OUTPUT_SCHEMA = "bigtop.test.hcat.schema.output";
-
- @Override
- public int run(String[] args) throws Exception {
- String inputTable = null;
- String outputTable = null;
- String inputSchemaStr = null;
- String outputSchemaStr = null;
- for(int i = 0; i < args.length; i++){
- if(args[i].equalsIgnoreCase("-it")){
- inputTable = args[i+1];
- }else if(args[i].equalsIgnoreCase("-ot")){
- outputTable = args[i+1];
- }else if(args[i].equalsIgnoreCase("-is")){
- inputSchemaStr = args[i+1];
- }else if(args[i].equalsIgnoreCase("-os")){
- outputSchemaStr = args[i+1];
+ private final static String INPUT_SCHEMA = "bigtop.test.hcat.schema.input";
+ private final static String OUTPUT_SCHEMA = "bigtop.test.hcat.schema.output";
+
+ @Override
+ public int run(String[] args) throws Exception {
+ String inputTable = null;
+ String outputTable = null;
+ String inputSchemaStr = null;
+ String outputSchemaStr = null;
+ for (int i = 0; i < args.length; i++) {
+ if (args[i].equalsIgnoreCase("-it")) {
+ inputTable = args[i + 1];
+ } else if (args[i].equalsIgnoreCase("-ot")) {
+ outputTable = args[i + 1];
+ } else if (args[i].equalsIgnoreCase("-is")) {
+ inputSchemaStr = args[i + 1];
+ } else if (args[i].equalsIgnoreCase("-os")) {
+ outputSchemaStr = args[i + 1];
+ }
}
+
+ Configuration conf = getConf();
+ args = new GenericOptionsParser(conf, args).getRemainingArgs();
+
+ conf.set(INPUT_SCHEMA, inputSchemaStr);
+ conf.set(OUTPUT_SCHEMA, outputSchemaStr);
+
+ Job job = new Job(conf, "bigtop_hcat_test");
+ HCatInputFormat.setInput(job, "default", inputTable);
+
+ job.setInputFormatClass(HCatInputFormat.class);
+ job.setJarByClass(HCatalogMR.class);
+ job.setMapperClass(Map.class);
+ job.setReducerClass(Reduce.class);
+ job.setMapOutputKeyClass(Text.class);
+ job.setMapOutputValueClass(IntWritable.class);
+ job.setOutputKeyClass(WritableComparable.class);
+ job.setOutputValueClass(HCatRecord.class);
+ HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", outputTable, null));
+ HCatOutputFormat.setSchema(job, HCatSchemaUtils.getHCatSchema(outputSchemaStr));
+ job.setOutputFormatClass(HCatOutputFormat.class);
+
+ return job.waitForCompletion(true) ? 0 : 1;
+
+
}
-
- Configuration conf = getConf();
- args = new GenericOptionsParser(conf, args).getRemainingArgs();
-
- conf.set(INPUT_SCHEMA, inputSchemaStr);
- conf.set(OUTPUT_SCHEMA, outputSchemaStr);
-
- Job job = new Job(conf, "bigtop_hcat_test");
- HCatInputFormat.setInput(job, "default", inputTable);
-
- job.setInputFormatClass(HCatInputFormat.class);
- job.setJarByClass(HCatalogMR.class);
- job.setMapperClass(Map.class);
- job.setReducerClass(Reduce.class);
- job.setMapOutputKeyClass(Text.class);
- job.setMapOutputValueClass(IntWritable.class);
- job.setOutputKeyClass(WritableComparable.class);
- job.setOutputValueClass(HCatRecord.class);
- HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", outputTable, null));
- HCatOutputFormat.setSchema(job, HCatSchemaUtils.getHCatSchema(outputSchemaStr));
- job.setOutputFormatClass(HCatOutputFormat.class);
-
- return job.waitForCompletion(true) ? 0 : 1;
-
-
- }
- public static class Map extends Mapper<WritableComparable,
- HCatRecord, Text, IntWritable> {
- private final static IntWritable one = new IntWritable(1);
- private Text word = new Text();
- private HCatSchema inputSchema = null;
- @Override
- protected void map(WritableComparable key, HCatRecord value, Context context)
- throws IOException, InterruptedException {
- if (inputSchema == null) {
- inputSchema =
- HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(INPUT_SCHEMA));
- }
- String line = value.getString("line", inputSchema);
- StringTokenizer tokenizer = new StringTokenizer(line);
- while (tokenizer.hasMoreTokens()) {
- word.set(tokenizer.nextToken());
- context.write(word, one);
- }
+ public static class Map extends Mapper<WritableComparable,
+ HCatRecord, Text, IntWritable> {
+ private final static IntWritable one = new IntWritable(1);
+ private Text word = new Text();
+ private HCatSchema inputSchema = null;
+
+ @Override
+ protected void map(WritableComparable key, HCatRecord value, Context context)
+ throws IOException, InterruptedException {
+ if (inputSchema == null) {
+ inputSchema =
+ HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(INPUT_SCHEMA));
+ }
+ String line = value.getString("line", inputSchema);
+ StringTokenizer tokenizer = new StringTokenizer(line);
+ while (tokenizer.hasMoreTokens()) {
+ word.set(tokenizer.nextToken());
+ context.write(word, one);
+ }
+ }
}
- }
- public static class Reduce extends Reducer<Text, IntWritable, WritableComparable, HCatRecord> {
- private HCatSchema outputSchema = null;
+ public static class Reduce extends Reducer<Text, IntWritable, WritableComparable, HCatRecord> {
+ private HCatSchema outputSchema = null;
- @Override
- protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws
- IOException, InterruptedException {
- if (outputSchema == null) {
- outputSchema =
- HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(OUTPUT_SCHEMA));
- }
- int sum = 0;
- for (IntWritable i : values) {
- sum += i.get();
- }
- HCatRecord output = new DefaultHCatRecord(2);
- output.set("word", outputSchema, key);
- output.set("count", outputSchema, sum);
- context.write(null, output);
+ @Override
+ protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws
+ IOException, InterruptedException {
+ if (outputSchema == null) {
+ outputSchema =
+ HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(OUTPUT_SCHEMA));
+ }
+ int sum = 0;
+ for (IntWritable i : values) {
+ sum += i.get();
+ }
+ HCatRecord output = new DefaultHCatRecord(2);
+ output.set("word", outputSchema, key);
+ output.set("count", outputSchema, sum);
+ context.write(null, output);
+ }
}
- }
- public static void main(String[] args) throws Exception {
- int exitCode = ToolRunner.run(new HCatalogMR(), args);
- System.exit(exitCode);
- }
- }
+ public static void main(String[] args) throws Exception {
+ int exitCode = ToolRunner.run(new HCatalogMR(), args);
+ System.exit(exitCode);
+ }
+}
http://git-wip-us.apache.org/repos/asf/bigtop/blob/a05d3813/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
index ee20588..f722d63 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
@@ -34,88 +34,88 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
public class HiveHelper {
-
- private static final Log LOG = LogFactory.getLog(HiveHelper.class.getName());
- public static Map<String, String> execCommand(CommandLine commandline) {
- return execCommand(commandline, null);
- }
+ private static final Log LOG = LogFactory.getLog(HiveHelper.class.getName());
- public static Map<String, String> execCommand(CommandLine commandline,
- Map<String, String> envVars) {
-
- System.out.println("Executing command:");
- System.out.println(commandline.toString());
- Map<String, String> env = null;
- Map<String, String> entry = new HashMap<String, String>();
- try {
- env = EnvironmentUtils.getProcEnvironment();
- } catch (IOException e1) {
- // TODO Auto-generated catch block
- LOG.debug("Failed to get process environment: "+ e1.getMessage());
- e1.printStackTrace();
- }
- if (envVars != null) {
- for (String key : envVars.keySet()) {
- env.put(key, envVars.get(key));
- }
- }
+ public static Map<String, String> execCommand(CommandLine commandline) {
+ return execCommand(commandline, null);
+ }
- DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
- ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
- PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream);
- ExecuteWatchdog watchdog = new ExecuteWatchdog(60*10000);
- Executor executor = new DefaultExecutor();
- executor.setExitValue(1);
- executor.setWatchdog(watchdog);
- executor.setStreamHandler(streamHandler);
- try {
- executor.execute(commandline, env, resultHandler);
- } catch (ExecuteException e) {
- // TODO Auto-generated catch block
- LOG.debug("Failed to execute command with exit value: "+ String.valueOf(resultHandler.getExitValue()));
- LOG.debug("outputStream: "+ outputStream.toString());
- entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
- entry.put("outputStream", outputStream.toString() + e.getMessage());
- e.printStackTrace();
- return entry;
- } catch (IOException e) {
- // TODO Auto-generated catch block
- LOG.debug("Failed to execute command with exit value: "+ String.valueOf(resultHandler.getExitValue()));
- LOG.debug("outputStream: "+ outputStream.toString());
- entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
- entry.put("outputStream", outputStream.toString() + e.getMessage());
- e.printStackTrace();
- return entry;
- }
-
- try {
- resultHandler.waitFor();
- /*System.out.println("Command output: "+outputStream.toString());*/
- entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
- entry.put("outputStream", outputStream.toString());
- return entry;
- } catch (InterruptedException e) {
- // TODO Auto-generated catch block
+ public static Map<String, String> execCommand(CommandLine commandline,
+ Map<String, String> envVars) {
+
+ System.out.println("Executing command:");
+ System.out.println(commandline.toString());
+ Map<String, String> env = null;
+ Map<String, String> entry = new HashMap<String, String>();
+ try {
+ env = EnvironmentUtils.getProcEnvironment();
+ } catch (IOException e1) {
+ // TODO Auto-generated catch block
+ LOG.debug("Failed to get process environment: " + e1.getMessage());
+ e1.printStackTrace();
+ }
+ if (envVars != null) {
+ for (String key : envVars.keySet()) {
+ env.put(key, envVars.get(key));
+ }
+ }
+
+ DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
+ ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+ PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream);
+ ExecuteWatchdog watchdog = new ExecuteWatchdog(60 * 10000);
+ Executor executor = new DefaultExecutor();
+ executor.setExitValue(1);
+ executor.setWatchdog(watchdog);
+ executor.setStreamHandler(streamHandler);
+ try {
+ executor.execute(commandline, env, resultHandler);
+ } catch (ExecuteException e) {
+ // TODO Auto-generated catch block
+ LOG.debug("Failed to execute command with exit value: " + String.valueOf(resultHandler.getExitValue()));
+ LOG.debug("outputStream: " + outputStream.toString());
+ entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+ entry.put("outputStream", outputStream.toString() + e.getMessage());
+ e.printStackTrace();
+ return entry;
+ } catch (IOException e) {
+ // TODO Auto-generated catch block
+ LOG.debug("Failed to execute command with exit value: " + String.valueOf(resultHandler.getExitValue()));
+ LOG.debug("outputStream: " + outputStream.toString());
+ entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+ entry.put("outputStream", outputStream.toString() + e.getMessage());
+ e.printStackTrace();
+ return entry;
+ }
+
+ try {
+ resultHandler.waitFor();
+ /*System.out.println("Command output: "+outputStream.toString());*/
+ entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+ entry.put("outputStream", outputStream.toString());
+ return entry;
+ } catch (InterruptedException e) {
+ // TODO Auto-generated catch block
/*System.out.println("Command output: "+outputStream.toString());*/
- LOG.debug("exitValue: "+ String.valueOf(resultHandler.getExitValue()));
- LOG.debug("outputStream: "+ outputStream.toString());
- entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
- entry.put("outputStream", outputStream.toString());
- e.printStackTrace();
- return entry;
- }
- }
-
- protected static String getProperty(String property, String description) {
- String val = System.getProperty(property);
- if (val == null) {
- throw new RuntimeException("You must set the property " + property + " with " +
- description);
- }
- LOG.debug(description + " is " + val);
- return val;
- }
-
+ LOG.debug("exitValue: " + String.valueOf(resultHandler.getExitValue()));
+ LOG.debug("outputStream: " + outputStream.toString());
+ entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+ entry.put("outputStream", outputStream.toString());
+ e.printStackTrace();
+ return entry;
+ }
+ }
+
+ protected static String getProperty(String property, String description) {
+ String val = System.getProperty(property);
+ if (val == null) {
+ throw new RuntimeException("You must set the property " + property + " with " +
+ description);
+ }
+ LOG.debug(description + " is " + val);
+ return val;
+ }
+
}
http://git-wip-us.apache.org/repos/asf/bigtop/blob/a05d3813/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
index 3b3ac51..35b9a3a 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
@@ -28,52 +28,52 @@ import java.sql.SQLException;
import java.util.Properties;
public class JdbcConnector {
- private static final Log LOG = LogFactory.getLog(JdbcConnector.class.getName());
+ private static final Log LOG = LogFactory.getLog(JdbcConnector.class.getName());
- protected static final String URL = "bigtop.test.hive.jdbc.url";
- protected static final String USER = "bigtop.test.hive.jdbc.user";
- protected static final String PASSWD = "bigtop.test.hive.jdbc.password";
- protected static final String LOCATION = "bigtop.test.hive.location";
- protected static final String METASTORE_URL = "bigtop.test.hive.metastore.url";
- protected static final String TEST_THRIFT = "bigtop.test.hive.thrift.test";
- protected static final String TEST_HCATALOG = "bigtop.test.hive.hcatalog.test";
- protected static final String HIVE_CONF_DIR = "bigtop.test.hive.conf.dir";
- protected static final String HADOOP_CONF_DIR = "bigtop.test.hadoop.conf.dir";
+ protected static final String URL = "bigtop.test.hive.jdbc.url";
+ protected static final String USER = "bigtop.test.hive.jdbc.user";
+ protected static final String PASSWD = "bigtop.test.hive.jdbc.password";
+ protected static final String LOCATION = "bigtop.test.hive.location";
+ protected static final String METASTORE_URL = "bigtop.test.hive.metastore.url";
+ protected static final String TEST_THRIFT = "bigtop.test.hive.thrift.test";
+ protected static final String TEST_HCATALOG = "bigtop.test.hive.hcatalog.test";
+ protected static final String HIVE_CONF_DIR = "bigtop.test.hive.conf.dir";
+ protected static final String HADOOP_CONF_DIR = "bigtop.test.hadoop.conf.dir";
- protected static Connection conn;
+ protected static Connection conn;
- @BeforeClass
- public static void connectToJdbc() throws SQLException {
- // Assume they've put the URL for the JDBC driver in an environment variable.
- String jdbcUrl = getProperty(URL, "the JDBC URL");
- String jdbcUser = getProperty(USER, "the JDBC user name");
- String jdbcPasswd = getProperty(PASSWD, "the JDBC password");
+ @BeforeClass
+ public static void connectToJdbc() throws SQLException {
+ // Assume they've put the URL for the JDBC driver in an environment variable.
+ String jdbcUrl = getProperty(URL, "the JDBC URL");
+ String jdbcUser = getProperty(USER, "the JDBC user name");
+ String jdbcPasswd = getProperty(PASSWD, "the JDBC password");
- Properties props = new Properties();
- props.put("user", jdbcUser);
- if (!jdbcPasswd.equals("")) props.put("password", jdbcPasswd);
- conn = DriverManager.getConnection(jdbcUrl, props);
- }
+ Properties props = new Properties();
+ props.put("user", jdbcUser);
+ if (!jdbcPasswd.equals("")) props.put("password", jdbcPasswd);
+ conn = DriverManager.getConnection(jdbcUrl, props);
+ }
- @AfterClass
- public static void closeJdbc() throws SQLException {
- if (conn != null) conn.close();
- }
+ @AfterClass
+ public static void closeJdbc() throws SQLException {
+ if (conn != null) conn.close();
+ }
- protected static String getProperty(String property, String description) {
- String val = System.getProperty(property);
- if (val == null) {
- throw new RuntimeException("You must set the property " + property + " with " +
- description);
+ protected static String getProperty(String property, String description) {
+ String val = System.getProperty(property);
+ if (val == null) {
+ throw new RuntimeException("You must set the property " + property + " with " +
+ description);
+ }
+ LOG.debug(description + " is " + val);
+ return val;
}
- LOG.debug(description + " is " + val);
- return val;
- }
- protected static boolean testActive(String property, String description) {
- String val = System.getProperty(property, "true");
- LOG.debug(description + " is " + val);
- return Boolean.valueOf(val);
- }
+ protected static boolean testActive(String property, String description) {
+ String val = System.getProperty(property, "true");
+ LOG.debug(description + " is " + val);
+ return Boolean.valueOf(val);
+ }
}
http://git-wip-us.apache.org/repos/asf/bigtop/blob/a05d3813/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
index bc2ab77..85d824e 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
@@ -24,178 +24,204 @@ import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
+
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.Map;
public class TestBeeline {
-
- public static final Log LOG = LogFactory.getLog(TestBeeline.class.getName());
-
- private static final String URL = "bigtop.test.hive.jdbc.url";
- private static final String USER = "bigtop.test.hive.jdbc.user";
- private static final String PASSWD = "bigtop.test.hive.jdbc.password";
-
- private static Map<String, String> results;
- private static String beelineUrl;
- private static String beelineUser;
- private static String beelinePasswd;
-
- //creating beeline base command with username and password as per inputs
- private static CommandLine beelineBaseCommand = new CommandLine("beeline");
-
- @BeforeClass
- public static void initialSetup(){
- TestBeeline.beelineUrl = System.getProperty(URL);
- TestBeeline.beelineUser = System.getProperty(USER);
- TestBeeline.beelinePasswd =System.getProperty(PASSWD);
-
- if (beelineUser != null && beelineUser != "" && beelinePasswd != null && beelinePasswd != "")
- {
- beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd);
- }
- else if (beelineUser != null && beelineUser != "")
- {
- beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser);
- }
- else {
- beelineBaseCommand.addArgument("-u").addArgument(beelineUrl);
- }
- LOG.info("URL is " + beelineUrl);
- LOG.info("User is " + beelineUser);
- LOG.info("Passwd is " + beelinePasswd);
- LOG.info("Passwd is null " + (beelinePasswd == null));
- }
-
- @Test
- public void checkBeeline() {
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand));
- String consoleMsg = results.get("outputStream").toLowerCase();
- Assert.assertEquals("beeline -u FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
- }
-
- @Test
- public void checkBeelineConnect(){
- try(PrintWriter out = new PrintWriter("connect.url")){ out.println("!connect " + beelineUrl+" "+beelineUser+" "+beelinePasswd); out.println("!quit"); }
- catch (FileNotFoundException e1) {
- e1.printStackTrace();
- }
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -f connect.url",false));
- String consoleMsg = results.get("outputStream").toLowerCase();
- Assert.assertEquals("beeline !connect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception") );
- }
-
- @Test
- public void checkBeelineHelp(){
- results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("--help"));
- String consoleMsg = results.get("outputStream").toLowerCase();
- Assert.assertEquals("beeline --help FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("display this message" ) && consoleMsg.contains("usage: java org.apache.hive.cli.beeline.beeline") && !consoleMsg.contains("exception"));
- }
-
- @Test
- public void checkBeelineQueryExecFromCmdLine(){
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
- if(!results.get("outputStream").contains("bigtop_runtime_hive")){
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive;"));
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
- }else{
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive;"));
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive;"));
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
- }
- String consoleMsg = results.get("outputStream").toLowerCase();
- Assert.assertEquals("beeline -e FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
- HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive"));
- }
-
- @Test
- public void checkBeelineQueryExecFromFile() throws FileNotFoundException{
-
- try(PrintWriter out = new PrintWriter("beeline-f1.sql")){ out.println("SHOW DATABASES;"); }
- try(PrintWriter out = new PrintWriter("beeline-f2.sql")){ out.println("CREATE DATABASE bigtop_runtime_hive;"); }
- try(PrintWriter out = new PrintWriter("beeline-f3.sql")){ out.println("DROP DATABASE bigtop_runtime_hive;"); out.println("CREATE DATABASE bigtop_runtime_hive;"); }
- try(PrintWriter out = new PrintWriter("beeline-f4.sql")){ out.println("DROP DATABASE bigtop_runtime_hive;"); }
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false));
-
- if(!results.get("outputStream").contains("bigtop_runtime_hive")){
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f2.sql",false));
- }else{
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f3.sql",false));
- }
-
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false));
-
- String consoleMsg = results.get("outputStream").toLowerCase();
- Assert.assertEquals("beeline -f FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
- HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f4.sql",false));
- }
-
- @Test
- public void checkBeelineInitFile() throws FileNotFoundException{
-
- try(PrintWriter out = new PrintWriter("beeline-i1.sql")){ out.println("SHOW DATABASES;"); }
- try(PrintWriter out = new PrintWriter("beeline-i2.sql")){ out.println("CREATE DATABASE bigtop_runtime_beeline_init;"); }
- try(PrintWriter out = new PrintWriter("beeline-i3.sql")){ out.println("DROP DATABASE bigtop_runtime_beeline_init;"); out.println("CREATE DATABASE bigtop_runtime_beeline_init;"); }
- try(PrintWriter out = new PrintWriter("beeline-i4.sql")){ out.println("DROP DATABASE bigtop_runtime_beeline_init;"); }
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false));
-
- if(!results.get("outputStream").contains("bigtop_runtime_beeline_init")){
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i2.sql",false));
- }else{
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i3.sql",false));
- }
-
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false));
- String consoleMsg = results.get("outputStream").toLowerCase();
- Assert.assertEquals("beeline -i FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_beeline_init") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
- HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i4.sql",false));
- }
-
- @Test
- public void checkBeelineHiveVar() throws FileNotFoundException{
-
- try(PrintWriter out = new PrintWriter("beeline-hv1.sql")){ out.println("SHOW DATABASES;"); }
- try(PrintWriter out = new PrintWriter("beeline-hv2.sql")){ out.println("CREATE DATABASE ${db};"); }
- try(PrintWriter out = new PrintWriter("beeline-hv3.sql")){ out.println("DROP DATABASE ${db};"); out.println("CREATE DATABASE ${db};"); }
- try(PrintWriter out = new PrintWriter("beeline-hv4.sql")){ out.println("DROP DATABASE ${db};"); }
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false));
-
- if(!results.get("outputStream").contains("bigtop_runtime_beeline_hivevar")){
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv2.sql",false));
- }else{
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv3.sql",false));
- }
-
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false));
- String consoleMsg = results.get("outputStream").toLowerCase();
- Assert.assertEquals("beeline --hivevar FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_beeline_hivevar") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
- HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv4.sql",false));
- }
-
- @Test
- public void checkBeelineFastConnect(){
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--fastConnect=false"));
- String consoleMsg = results.get("outputStream").toLowerCase();
- Assert.assertEquals("beeline --fastConnect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("set fastconnect to true to skip"));
- }
-
- @Test
- public void checkBeelineVerbose(){
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--verbose=true"));
- String consoleMsg = results.get("outputStream").toLowerCase();
- Assert.assertEquals("beeline --verbose FAILED." +results.get("outputStream"), true, consoleMsg.contains("issuing: !connect jdbc:hive2:") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
- }
-
- @Test
- public void checkBeelineShowHeader(){
- results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--showHeader=false").addArgument("-e").addArgument("SHOW DATABASES;"));
- String consoleMsg = results.get("outputStream").toLowerCase();
- Assert.assertEquals("beeline --showHeader FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("default")&&!consoleMsg.contains("database_name") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
- }
-
- @AfterClass
- public static void cleanup() throws FileNotFoundException {
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf beeline*.sql", false));
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf connect.url", false));
- }
+
+ public static final Log LOG = LogFactory.getLog(TestBeeline.class.getName());
+
+ private static final String URL = "bigtop.test.hive.jdbc.url";
+ private static final String USER = "bigtop.test.hive.jdbc.user";
+ private static final String PASSWD = "bigtop.test.hive.jdbc.password";
+
+ private static Map<String, String> results;
+ private static String beelineUrl;
+ private static String beelineUser;
+ private static String beelinePasswd;
+
+ //creating beeline base command with username and password as per inputs
+ private static CommandLine beelineBaseCommand = new CommandLine("beeline");
+
+ @BeforeClass
+ public static void initialSetup() {
+ TestBeeline.beelineUrl = System.getProperty(URL);
+ TestBeeline.beelineUser = System.getProperty(USER);
+ TestBeeline.beelinePasswd = System.getProperty(PASSWD);
+
+ if (beelineUser != null && beelineUser != "" && beelinePasswd != null && beelinePasswd != "") {
+ beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd);
+ } else if (beelineUser != null && beelineUser != "") {
+ beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser);
+ } else {
+ beelineBaseCommand.addArgument("-u").addArgument(beelineUrl);
+ }
+ LOG.info("URL is " + beelineUrl);
+ LOG.info("User is " + beelineUser);
+ LOG.info("Passwd is " + beelinePasswd);
+ LOG.info("Passwd is null " + (beelinePasswd == null));
+ }
+
+ @Test
+ public void checkBeeline() {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand));
+ String consoleMsg = results.get("outputStream").toLowerCase();
+ Assert.assertEquals("beeline -u FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("connecting to " + beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+ }
+
+ @Test
+ public void checkBeelineConnect() {
+ try (PrintWriter out = new PrintWriter("connect.url")) {
+ out.println("!connect " + beelineUrl + " " + beelineUser + " " + beelinePasswd);
+ out.println("!quit");
+ } catch (FileNotFoundException e1) {
+ e1.printStackTrace();
+ }
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -f connect.url", false));
+ String consoleMsg = results.get("outputStream").toLowerCase();
+ Assert.assertEquals("beeline !connect FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("connecting to " + beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+ }
+
+ @Test
+ public void checkBeelineHelp() {
+ results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("--help"));
+ String consoleMsg = results.get("outputStream").toLowerCase();
+ Assert.assertEquals("beeline --help FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("display this message") && consoleMsg.contains("usage: java org.apache.hive.cli.beeline.beeline") && !consoleMsg.contains("exception"));
+ }
+
+ @Test
+ public void checkBeelineQueryExecFromCmdLine() {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
+ if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive;"));
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
+ } else {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive;"));
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive;"));
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
+ }
+ String consoleMsg = results.get("outputStream").toLowerCase();
+ Assert.assertEquals("beeline -e FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_hive") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+ HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive"));
+ }
+
+ @Test
+ public void checkBeelineQueryExecFromFile() throws FileNotFoundException {
+
+ try (PrintWriter out = new PrintWriter("beeline-f1.sql")) {
+ out.println("SHOW DATABASES;");
+ }
+ try (PrintWriter out = new PrintWriter("beeline-f2.sql")) {
+ out.println("CREATE DATABASE bigtop_runtime_hive;");
+ }
+ try (PrintWriter out = new PrintWriter("beeline-f3.sql")) {
+ out.println("DROP DATABASE bigtop_runtime_hive;");
+ out.println("CREATE DATABASE bigtop_runtime_hive;");
+ }
+ try (PrintWriter out = new PrintWriter("beeline-f4.sql")) {
+ out.println("DROP DATABASE bigtop_runtime_hive;");
+ }
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql", false));
+
+ if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f2.sql", false));
+ } else {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f3.sql", false));
+ }
+
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql", false));
+
+ String consoleMsg = results.get("outputStream").toLowerCase();
+ Assert.assertEquals("beeline -f FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_hive") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+ HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f4.sql", false));
+ }
+
+ @Test
+ public void checkBeelineInitFile() throws FileNotFoundException {
+
+ try (PrintWriter out = new PrintWriter("beeline-i1.sql")) {
+ out.println("SHOW DATABASES;");
+ }
+ try (PrintWriter out = new PrintWriter("beeline-i2.sql")) {
+ out.println("CREATE DATABASE bigtop_runtime_beeline_init;");
+ }
+ try (PrintWriter out = new PrintWriter("beeline-i3.sql")) {
+ out.println("DROP DATABASE bigtop_runtime_beeline_init;");
+ out.println("CREATE DATABASE bigtop_runtime_beeline_init;");
+ }
+ try (PrintWriter out = new PrintWriter("beeline-i4.sql")) {
+ out.println("DROP DATABASE bigtop_runtime_beeline_init;");
+ }
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql", false));
+
+ if (!results.get("outputStream").contains("bigtop_runtime_beeline_init")) {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i2.sql", false));
+ } else {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i3.sql", false));
+ }
+
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql", false));
+ String consoleMsg = results.get("outputStream").toLowerCase();
+ Assert.assertEquals("beeline -i FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_beeline_init") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+ HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i4.sql", false));
+ }
+
+ @Test
+ public void checkBeelineHiveVar() throws FileNotFoundException {
+
+ try (PrintWriter out = new PrintWriter("beeline-hv1.sql")) {
+ out.println("SHOW DATABASES;");
+ }
+ try (PrintWriter out = new PrintWriter("beeline-hv2.sql")) {
+ out.println("CREATE DATABASE ${db};");
+ }
+ try (PrintWriter out = new PrintWriter("beeline-hv3.sql")) {
+ out.println("DROP DATABASE ${db};");
+ out.println("CREATE DATABASE ${db};");
+ }
+ try (PrintWriter out = new PrintWriter("beeline-hv4.sql")) {
+ out.println("DROP DATABASE ${db};");
+ }
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql", false));
+
+ if (!results.get("outputStream").contains("bigtop_runtime_beeline_hivevar")) {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv2.sql", false));
+ } else {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv3.sql", false));
+ }
+
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql", false));
+ String consoleMsg = results.get("outputStream").toLowerCase();
+ Assert.assertEquals("beeline --hivevar FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_beeline_hivevar") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+ HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv4.sql", false));
+ }
+
+ @Test
+ public void checkBeelineFastConnect() {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--fastConnect=false"));
+ String consoleMsg = results.get("outputStream").toLowerCase();
+ Assert.assertEquals("beeline --fastConnect FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("set fastconnect to true to skip"));
+ }
+
+ @Test
+ public void checkBeelineVerbose() {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--verbose=true"));
+ String consoleMsg = results.get("outputStream").toLowerCase();
+ Assert.assertEquals("beeline --verbose FAILED." + results.get("outputStream"), true, consoleMsg.contains("issuing: !connect jdbc:hive2:") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+ }
+
+ @Test
+ public void checkBeelineShowHeader() {
+ results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--showHeader=false").addArgument("-e").addArgument("SHOW DATABASES;"));
+ String consoleMsg = results.get("outputStream").toLowerCase();
+ Assert.assertEquals("beeline --showHeader FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("default") && !consoleMsg.contains("database_name") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+ }
+
+ @AfterClass
+ public static void cleanup() throws FileNotFoundException {
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf beeline*.sql", false));
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf connect.url", false));
+ }
}