Posted to commits@bigtop.apache.org by rv...@apache.org on 2017/03/23 17:28:17 UTC

[46/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/5e342c45
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/5e342c45
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/5e342c45

Branch: refs/heads/master
Commit: 5e342c45364ec97f5e3530769a1cc8bdbcf69bb0
Parents: 77e0d6e
Author: Roman Shaposhnik <rv...@apache.org>
Authored: Wed Mar 22 08:51:22 2017 -0700
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:16 2017 -0700

----------------------------------------------------------------------
 bigtop-tests/smoke-tests/odpi-runtime/README.md |  48 ++
 .../smoke-tests/odpi-runtime/build.gradle       |  63 +++
 .../odpi/specs/runtime/hadoop/ApiExaminer.java  | 485 +++++++++++++++++
 .../org/odpi/specs/runtime/hive/HCatalogMR.java | 137 +++++
 .../src/main/resources/api-examiner-prep.sh     |  64 +++
 .../odpi/specs/runtime/TestSpecsRuntime.groovy  | 275 ++++++++++
 .../org/odpi/specs/runtime/hive/HiveHelper.java | 121 ++++
 .../odpi/specs/runtime/hive/JdbcConnector.java  |  79 +++
 .../odpi/specs/runtime/hive/TestBeeline.java    | 201 +++++++
 .../org/odpi/specs/runtime/hive/TestCLI.java    | 213 ++++++++
 .../odpi/specs/runtime/hive/TestHCatalog.java   | 158 ++++++
 .../org/odpi/specs/runtime/hive/TestJdbc.java   | 545 +++++++++++++++++++
 .../org/odpi/specs/runtime/hive/TestSql.java    | 337 ++++++++++++
 .../org/odpi/specs/runtime/hive/TestThrift.java | 251 +++++++++
 .../src/test/python/find-public-apis.py         |  80 +++
 .../hadoop-common-2.7.3-api-report.json         |   1 +
 .../src/test/resources/hadoop-common-bin.list   |   2 +
 .../src/test/resources/hadoop-common-jar.list   |  60 ++
 .../src/test/resources/hadoop-common.list       | 230 ++++++++
 .../resources/hadoop-hdfs-2.7.3-api-report.json |   1 +
 .../src/test/resources/hadoop-hdfs-bin.list     |   1 +
 .../src/test/resources/hadoop-hdfs-jar.list     |  25 +
 .../src/test/resources/hadoop-hdfs.list         |  79 +++
 .../test/resources/hadoop-mapreduce-bin.list    |   1 +
 ...-mapreduce-client-core-2.7.3-api-report.json |   1 +
 .../test/resources/hadoop-mapreduce-jar.list    |  22 +
 .../src/test/resources/hadoop-mapreduce.list    | 123 +++++
 .../src/test/resources/hadoop-subprojs.list     |   4 +
 .../hadoop-yarn-api-2.7.3-api-report.json       |   1 +
 .../src/test/resources/hadoop-yarn-bin.list     |   3 +
 .../hadoop-yarn-client-2.7.3-api-report.json    |   1 +
 .../hadoop-yarn-common-2.7.3-api-report.json    |   1 +
 .../src/test/resources/hadoop-yarn-jar.list     |  38 ++
 .../src/test/resources/hadoop-yarn.list         |  74 +++
 .../test/resources/testRuntimeSpecConf.groovy   | 430 +++++++++++++++
 bigtop-tests/spec-tests/README.md               |  48 --
 bigtop-tests/spec-tests/build.gradle            |  63 ---
 bigtop-tests/spec-tests/runtime/build.gradle    |  63 ---
 .../odpi/specs/runtime/hadoop/ApiExaminer.java  | 485 -----------------
 .../org/odpi/specs/runtime/hive/HCatalogMR.java | 137 -----
 .../src/main/resources/api-examiner-prep.sh     |  64 ---
 .../odpi/specs/runtime/TestSpecsRuntime.groovy  | 275 ----------
 .../org/odpi/specs/runtime/hive/HiveHelper.java | 121 ----
 .../odpi/specs/runtime/hive/JdbcConnector.java  |  79 ---
 .../odpi/specs/runtime/hive/TestBeeline.java    | 201 -------
 .../org/odpi/specs/runtime/hive/TestCLI.java    | 213 --------
 .../odpi/specs/runtime/hive/TestHCatalog.java   | 158 ------
 .../org/odpi/specs/runtime/hive/TestJdbc.java   | 545 -------------------
 .../org/odpi/specs/runtime/hive/TestSql.java    | 337 ------------
 .../org/odpi/specs/runtime/hive/TestThrift.java | 251 ---------
 .../runtime/src/test/python/find-public-apis.py |  80 ---
 .../hadoop-common-2.7.3-api-report.json         |   1 -
 .../src/test/resources/hadoop-common-bin.list   |   2 -
 .../src/test/resources/hadoop-common-jar.list   |  60 --
 .../src/test/resources/hadoop-common.list       | 230 --------
 .../resources/hadoop-hdfs-2.7.3-api-report.json |   1 -
 .../src/test/resources/hadoop-hdfs-bin.list     |   1 -
 .../src/test/resources/hadoop-hdfs-jar.list     |  25 -
 .../runtime/src/test/resources/hadoop-hdfs.list |  79 ---
 .../test/resources/hadoop-mapreduce-bin.list    |   1 -
 ...-mapreduce-client-core-2.7.3-api-report.json |   1 -
 .../test/resources/hadoop-mapreduce-jar.list    |  22 -
 .../src/test/resources/hadoop-mapreduce.list    | 123 -----
 .../src/test/resources/hadoop-subprojs.list     |   4 -
 .../hadoop-yarn-api-2.7.3-api-report.json       |   1 -
 .../src/test/resources/hadoop-yarn-bin.list     |   3 -
 .../hadoop-yarn-client-2.7.3-api-report.json    |   1 -
 .../hadoop-yarn-common-2.7.3-api-report.json    |   1 -
 .../src/test/resources/hadoop-yarn-jar.list     |  38 --
 .../runtime/src/test/resources/hadoop-yarn.list |  74 ---
 .../test/resources/testRuntimeSpecConf.groovy   | 430 ---------------
 build.gradle                                    |   3 +-
 settings.gradle                                 |   5 -
 73 files changed, 4157 insertions(+), 4224 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/README.md
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/README.md b/bigtop-tests/smoke-tests/odpi-runtime/README.md
new file mode 100644
index 0000000..8fde997
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/README.md
@@ -0,0 +1,48 @@
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Test suite to validate Hadoop basic specifications
+==================================================
+
+The test suite is intended to be used as a validation tool to make sure that a
+Hadoop stack derived from Apache Bigtop is still compliant with the
+specification. At a minimum, that means guaranteeing compatibility of the
+environment, binary layouts, certain configuration parameters, and so on.
+
+The validation test suite for the specs is loosely based on Apache Bigtop iTest
+and consists of two essential parts: a configuration file, describing the
+commands to run and their expected outcome(s); and the test driver that runs
+the commands and compares the results.
+ 
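+A configuration entry pairs a named test with its type and the arguments the
+driver needs. A minimal sketch of the DSL (the shipped definitions live in
+testRuntimeSpecConf.groovy; the names and values below are illustrative):
+
+```
+specs {
+  tests {
+    'HADOOP_ENV' {
+      name = 'HADOOP_ENV'
+      type = 'envdir'
+      arguments {
+        envcmd = 'hadoop envvars'
+        variable = 'HADOOP_COMMON_HOME'
+      }
+    }
+  }
+}
+```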
+Running the tests
+=================
+
+Tests can be executed by running the following command:
+```
+  gradle :bigtop-tests:smoke-tests:odpi-runtime:test --info
+```
+

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/build.gradle
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/build.gradle b/bigtop-tests/smoke-tests/odpi-runtime/build.gradle
new file mode 100644
index 0000000..97e3635
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/build.gradle
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+def junitVersion = '4.11'
+
+apply plugin: 'java'
+
+repositories {
+  maven {
+    url "http://conjars.org/repo/"
+  }
+}
+dependencies {
+  compile group: 'junit', name: 'junit', version: junitVersion, transitive: 'true'
+  compile group: 'commons-logging', name: 'commons-logging', version: '1.1.3'
+  compile group: 'org.apache.commons', name: 'commons-exec', version: '1.3'
+  compile group: 'org.apache.hive', name: 'hive-jdbc', version: '1.2.1'
+  compile group: 'org.apache.hive', name: 'hive-metastore', version: '1.2.1'
+  compile group: 'org.apache.hive', name: 'hive-common', version: '1.2.1'
+  compile group: 'org.apache.thrift', name: 'libfb303', version: '0.9.3'
+  compile group: 'org.apache.thrift', name: 'libthrift', version: '0.9.3'
+  compile group: 'org.apache.hadoop', name: 'hadoop-common', version: '2.7.2'
+  compile group: 'org.apache.hive.hcatalog', name: 'hive-hcatalog-core', version: '1.2.1'
+  testCompile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-core', version: '2.7.2'
+  compile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-jobclient', version: '2.7.2'
+  testCompile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-common', version: '2.7.2'
+  testCompile group: 'org.apache.hadoop', name: 'hadoop-hdfs', version: '2.7.2'
+  testCompile group: 'org.apache.hive', name: 'hive-exec', version: '1.2.1'
+  testCompile "junit:junit:4.11"
+  if (System.env.HADOOP_CONF_DIR) testRuntime files(System.env.HADOOP_CONF_DIR)
+}
+
+jar {
+    from {
+        (configurations.runtime).grep{it.toString() =~ /(hive|libfb303)-.*[jw]ar$/}.collect {
+              zipTree(it)
+        }
+    }
+
+    exclude 'META-INF/*.RSA', 'META-INF/*.SF','META-INF/*.DSA'
+}
+
+test {
+  // Change the default location where test data is picked up
+  systemProperty 'test.resources.dir', "${buildDir}/resources/test/"
+  systemProperty 'odpi.test.hive.hcat.job.jar', jar.archivePath
+  systemProperty 'odpi.test.hive.hcat.core.jar', (configurations.runtime).find { it.toString() =~ /hive-hcatalog-core-.*jar$/ }
+}
+test.dependsOn jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
new file mode 100644
index 0000000..d95c010
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
@@ -0,0 +1,485 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hadoop;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Options;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.codehaus.jackson.annotate.JsonIgnore;
+import org.codehaus.jackson.map.ObjectMapper;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * A tool that records the public, stable API of Hadoop library jars and
+ * checks jars for conformance against a recorded specification.
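+ * <p>
+ * Run with -p to write a JSON API report for a jar, or with -c to compare a
+ * jar against a previously written report (paths below are illustrative):
+ * <pre>
+ *   java -cp ... org.odpi.specs.runtime.hadoop.ApiExaminer -p out/ -j hadoop-common-2.7.3.jar
+ *   java -cp ... org.odpi.specs.runtime.hadoop.ApiExaminer -c hadoop-common-2.7.3-api-report.json -j hadoop-common-2.7.3.jar
+ * </pre>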
+ */
+public class ApiExaminer {
+
+  private static final Log LOG = LogFactory.getLog(ApiExaminer.class.getName());
+
+  static private Set<String> unloadableClasses;
+
+  private List<String> errors;
+  private List<String> warnings;
+
+  static {
+    unloadableClasses = new HashSet<>();
+    unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsMapping");
+    unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping");
+    unloadableClasses.add("org.apache.hadoop.io.compress.lz4.Lz4Compressor");
+    unloadableClasses.add("org.apache.hadoop.record.compiler.ant.RccTask");
+
+  }
+
+  public static void main(String[] args) {
+    Options options = new Options();
+
+    options.addOption("c", "compare", true,
+        "Compare against a spec, argument is the json file containing spec");
+    options.addOption("h", "help", false, "You're looking at it");
+    options.addOption("j", "jar", true, "Jar to examine");
+    options.addOption("p", "prepare-spec", true,
+        "Prepare the spec, argument is the directory to write the spec to");
+
+    try {
+      CommandLine cli = new GnuParser().parse(options, args);
+
+      if (cli.hasOption('h')) {
+        usage(options);
+        return;
+      }
+
+      if ((!cli.hasOption('c') && !cli.hasOption('p')) ||
+          (cli.hasOption('c') && cli.hasOption('p'))) {
+        System.err.println("You must choose either -c or -p");
+        usage(options);
+        return;
+      }
+
+      if (!cli.hasOption('j')) {
+        System.err.println("You must specify the jar to prepare or compare");
+        usage(options);
+        return;
+      }
+
+      String jar = cli.getOptionValue('j');
+      ApiExaminer examiner = new ApiExaminer();
+
+      if (cli.hasOption('c')) {
+        examiner.compareAgainstStandard(cli.getOptionValue('c'), jar);
+      } else if (cli.hasOption('p')) {
+        examiner.prepareExpected(jar, cli.getOptionValue('p'));
+      }
+    } catch (Exception e) {
+      System.err.println("Received exception while processing");
+      e.printStackTrace();
+    }
+  }
+
+  private static void usage(Options options) {
+    HelpFormatter help = new HelpFormatter();
+    help.printHelp("api-examiner", options);
+
+  }
+
+  private ApiExaminer() {
+  }
+
+  private void prepareExpected(String jarFile, String outputDir) throws IOException,
+      ClassNotFoundException {
+    JarInfo jarInfo = new JarInfo(jarFile, this);
+    jarInfo.dumpToFile(new File(outputDir));
+  }
+
+  private void compareAgainstStandard(String json, String jarFile) throws IOException,
+      ClassNotFoundException {
+    errors = new ArrayList<>();
+    warnings = new ArrayList<>();
+    JarInfo underTest = new JarInfo(jarFile, this);
+    JarInfo standard = jarInfoFromFile(new File(json));
+    standard.compareAndReport(underTest);
+
+    if (errors.size() > 0) {
+      System.err.println("Found " + errors.size() + " incompatibilities:");
+      for (String error : errors) {
+        System.err.println(error);
+      }
+    }
+
+    if (warnings.size() > 0) {
+      System.err.println("Found " + warnings.size() + " possible issues: ");
+      for (String warning : warnings) {
+        System.err.println(warning);
+      }
+    }
+
+
+  }
+
+  private JarInfo jarInfoFromFile(File inputFile) throws IOException {
+    ObjectMapper mapper = new ObjectMapper();
+    JarInfo jarInfo = mapper.readValue(inputFile, JarInfo.class);
+    jarInfo.patchUpClassBackPointers(this);
+    return jarInfo;
+  }
+
+  private static class JarInfo {
+    String name;
+    String version;
+    ApiExaminer container;
+    Map<String, ClassInfo> classes;
+
+    // For use by Jackson
+    public JarInfo() {
+
+    }
+
+    JarInfo(String jarFile, ApiExaminer container) throws IOException, ClassNotFoundException {
+      this.container = container;
+      LOG.info("Processing jar " + jarFile);
+      File f = new File(jarFile);
+      Pattern pattern = Pattern.compile("(hadoop-[a-z\\-]+)-([0-9]+\\.[0-9]+\\.[0-9]+).*");
+      Matcher matcher = pattern.matcher(f.getName());
+      if (!matcher.matches()) {
+        String msg = "Unable to determine name and version from " + f.getName();
+        LOG.error(msg);
+        throw new RuntimeException(msg);
+      }
+      name = matcher.group(1);
+      version = matcher.group(2);
+      classes = new HashMap<>();
+
+      JarFile jar = new JarFile(jarFile);
+      Enumeration<JarEntry> entries = jar.entries();
+      while (entries.hasMoreElements()) {
+        String name = entries.nextElement().getName();
+        if (name.endsWith(".class")) {
+          name = name.substring(0, name.length() - 6);
+          name = name.replace('/', '.');
+          if (!unloadableClasses.contains(name)) {
+            LOG.debug("Processing class " + name);
+            Class<?> clazz = Class.forName(name);
+            if (clazz.getAnnotation(InterfaceAudience.Public.class) != null &&
+                clazz.getAnnotation(InterfaceStability.Stable.class) != null) {
+              classes.put(name, new ClassInfo(this, clazz));
+            }
+          }
+        }
+      }
+      jar.close();
+    }
+
+    public String getName() {
+      return name;
+    }
+
+    public void setName(String name) {
+      this.name = name;
+    }
+
+    public String getVersion() {
+      return version;
+    }
+
+    public void setVersion(String version) {
+      this.version = version;
+    }
+
+    public Map<String, ClassInfo> getClasses() {
+      return classes;
+    }
+
+    public void setClasses(Map<String, ClassInfo> classes) {
+      this.classes = classes;
+    }
+
+    void compareAndReport(JarInfo underTest) {
+      Set<ClassInfo> underTestClasses = new HashSet<>(underTest.classes.values());
+      for (ClassInfo classInfo : classes.values()) {
+        if (underTestClasses.contains(classInfo)) {
+          classInfo.compareAndReport(underTest.classes.get(classInfo.name));
+          underTestClasses.remove(classInfo);
+        } else {
+          container.errors.add(underTest + " does not contain class " + classInfo);
+        }
+      }
+
+      if (underTestClasses.size() > 0) {
+        for (ClassInfo extra : underTestClasses) {
+          container.warnings.add(underTest + " contains extra class " + extra);
+        }
+      }
+    }
+
+    void dumpToFile(File outputDir) throws IOException {
+      File output = new File(outputDir, name + "-" + version + "-api-report.json");
+      ObjectMapper mapper = new ObjectMapper();
+      mapper.writeValue(output, this);
+    }
+
+    void patchUpClassBackPointers(ApiExaminer container) {
+      this.container = container;
+      for (ClassInfo classInfo : classes.values()) {
+        classInfo.setJar(this);
+        classInfo.patchUpBackMethodBackPointers();
+      }
+    }
+
+    @Override
+    public boolean equals(Object other) {
+      if (!(other instanceof JarInfo)) return false;
+      JarInfo that = (JarInfo)other;
+      return name.equals(that.name) && version.equals(that.version);
+    }
+
+    @Override
+    public String toString() {
+      return name + "-" + version;
+    }
+  }
+
+  private static class ClassInfo {
+    @JsonIgnore JarInfo jar;
+    String name;
+    Map<String, MethodInfo> methods;
+
+    // For use by Jackson
+    public ClassInfo() {
+
+    }
+
+    ClassInfo(JarInfo jar, Class<?> clazz) {
+      this.jar = jar;
+      this.name = clazz.getName();
+      methods = new HashMap<>();
+
+      for (Method method : clazz.getMethods()) {
+        if (method.getDeclaringClass().equals(clazz)) {
+          LOG.debug("Processing method " + method.getName());
+          MethodInfo mi = new MethodInfo(this, method);
+          methods.put(mi.toString(), mi);
+        }
+      }
+    }
+
+    public JarInfo getJar() {
+      return jar;
+    }
+
+    public void setJar(JarInfo jar) {
+      this.jar = jar;
+    }
+
+    public String getName() {
+      return name;
+    }
+
+    public void setName(String name) {
+      this.name = name;
+    }
+
+    public Map<String, MethodInfo> getMethods() {
+      return methods;
+    }
+
+    public void setMethods(Map<String, MethodInfo> methods) {
+      this.methods = methods;
+    }
+
+    void compareAndReport(ClassInfo underTest) {
+      // Make a copy so we can remove them as we match them, making it easy to find additional ones
+      Set<MethodInfo> underTestMethods = new HashSet<>(underTest.methods.values());
+      for (MethodInfo methodInfo : methods.values()) {
+        if (underTestMethods.contains(methodInfo)) {
+          methodInfo.compareAndReport(underTest.methods.get(methodInfo.toString()));
+          underTestMethods.remove(methodInfo);
+        } else {
+          jar.container.errors.add(underTest + " does not contain method " + methodInfo);
+        }
+      }
+
+      if (underTestMethods.size() > 0) {
+        for (MethodInfo extra : underTestMethods) {
+          jar.container.warnings.add(underTest + " contains extra method " + extra);
+        }
+      }
+    }
+
+    void patchUpBackMethodBackPointers() {
+      for (MethodInfo methodInfo : methods.values()) methodInfo.setContainingClass(this);
+    }
+
+    @Override
+    public boolean equals(Object other) {
+      if (!(other instanceof ClassInfo)) return false;
+      ClassInfo that = (ClassInfo)other;
+      return name.equals(that.name);  // Classes can be compared just on names
+    }
+
+    @Override
+    public int hashCode() {
+      return name.hashCode();
+    }
+
+    @Override
+    public String toString() {
+      return jar + " " + name;
+    }
+  }
+
+  private static class MethodInfo {
+    @JsonIgnore ClassInfo containingClass;
+    String name;
+    String returnType;
+    List<String> args;
+    Set<String> exceptions;
+
+    // For use by Jackson
+    public MethodInfo() {
+
+    }
+
+    MethodInfo(ClassInfo containingClass, Method method) {
+      this.containingClass = containingClass;
+      this.name = method.getName();
+      args = new ArrayList<>();
+      for (Class<?> argClass : method.getParameterTypes()) {
+        args.add(argClass.getName());
+      }
+      returnType = method.getReturnType().getName();
+      exceptions = new HashSet<>();
+      for (Class<?> exception : method.getExceptionTypes()) {
+        exceptions.add(exception.getName());
+      }
+    }
+
+    public ClassInfo getContainingClass() {
+      return containingClass;
+    }
+
+    public void setContainingClass(ClassInfo containingClass) {
+      this.containingClass = containingClass;
+    }
+
+    public String getName() {
+      return name;
+    }
+
+    public void setName(String name) {
+      this.name = name;
+    }
+
+    public String getReturnType() {
+      return returnType;
+    }
+
+    public void setReturnType(String returnType) {
+      this.returnType = returnType;
+    }
+
+    public List<String> getArgs() {
+      return args;
+    }
+
+    public void setArgs(List<String> args) {
+      this.args = args;
+    }
+
+    public Set<String> getExceptions() {
+      return exceptions;
+    }
+
+    public void setExceptions(Set<String> exceptions) {
+      this.exceptions = exceptions;
+    }
+
+    void compareAndReport(MethodInfo underTest) {
+      // Check to see if they've added or removed exceptions
+      // Make a copy so I can remove them as I check them off and easily find any that have been
+      // added.
+      Set<String> underTestExceptions = new HashSet<>(underTest.exceptions);
+      for (String exception : exceptions) {
+        if (underTest.exceptions.contains(exception)) {
+          underTestExceptions.remove(exception);
+        } else {
+          containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
+              underTest.containingClass + "." + name + " removes exception " + exception);
+        }
+      }
+      if (underTestExceptions.size() > 0) {
+        for (String underTestException : underTestExceptions) {
+          containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
+              underTest.containingClass + "." + name + " adds exception " + underTestException);
+        }
+      }
+    }
+
+    @Override
+    public boolean equals(Object other) {
+      if (!(other instanceof MethodInfo)) return false;
+      MethodInfo that = (MethodInfo)other;
+
+      return containingClass.equals(that.containingClass) && name.equals(that.name) &&
+          returnType.equals(that.returnType) && args.equals(that.args);
+    }
+
+    @Override
+    public int hashCode() {
+      return ((containingClass.hashCode() * 31 + name.hashCode()) * 31 + returnType.hashCode()) * 31 +
+          args.hashCode();
+    }
+
+    @Override
+    public String toString() {
+      StringBuilder buf = new StringBuilder(returnType)
+          .append(" ")
+          .append(name)
+          .append('(');
+      boolean first = true;
+      for (String arg : args) {
+        if (first) first = false;
+        else buf.append(", ");
+        buf.append(arg);
+      }
+      buf.append(")");
+      if (exceptions.size() > 0) {
+        buf.append(" throws ");
+        first = true;
+        for (String exception : exceptions) {
+          if (first) first = false;
+          else buf.append(", ");
+          buf.append(exception);
+        }
+      }
+      return buf.toString();
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
new file mode 100644
index 0000000..4110d5d
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
@@ -0,0 +1,137 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hive;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.hive.hcatalog.data.DefaultHCatRecord;
+import org.apache.hive.hcatalog.data.HCatRecord;
+import org.apache.hive.hcatalog.data.schema.HCatSchema;
+import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils;
+import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
+import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
+
+import java.io.IOException;
+import java.util.StringTokenizer;
+
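+/**
+ * A small word-count MapReduce job that reads lines from one HCatalog table
+ * and writes (word, count) records to another, exercising HCatInputFormat and
+ * HCatOutputFormat. Expects -it/-ot arguments naming the input/output tables
+ * and -is/-os giving their schemas.
+ */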
+public class HCatalogMR extends Configured implements Tool {
+  private final static String INPUT_SCHEMA = "odpi.test.hcat.schema.input";
+  private final static String OUTPUT_SCHEMA = "odpi.test.hcat.schema.output";
+
+  @Override
+  public int run(String[] args) throws Exception {
+    String inputTable = null;
+    String outputTable = null;
+    String inputSchemaStr = null;
+    String outputSchemaStr = null;
+    for(int i = 0; i < args.length; i++){
+        if(args[i].equalsIgnoreCase("-it")){
+            inputTable = args[i+1];
+        }else if(args[i].equalsIgnoreCase("-ot")){
+            outputTable = args[i+1];
+        }else if(args[i].equalsIgnoreCase("-is")){
+            inputSchemaStr = args[i+1];
+        }else if(args[i].equalsIgnoreCase("-os")){
+            outputSchemaStr = args[i+1];
+        }
+    }
+    
+    Configuration conf = getConf();
+    args = new GenericOptionsParser(conf, args).getRemainingArgs();
+
+    conf.set(INPUT_SCHEMA, inputSchemaStr);
+    conf.set(OUTPUT_SCHEMA, outputSchemaStr);
+
+    Job job = Job.getInstance(conf, "odpi_hcat_test");
+    HCatInputFormat.setInput(job, "default", inputTable);
+
+    job.setInputFormatClass(HCatInputFormat.class);
+    job.setJarByClass(HCatalogMR.class);
+    job.setMapperClass(Map.class);
+    job.setReducerClass(Reduce.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+    job.setOutputKeyClass(WritableComparable.class);
+    job.setOutputValueClass(HCatRecord.class);
+    HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", outputTable, null));
+    HCatOutputFormat.setSchema(job, HCatSchemaUtils.getHCatSchema(outputSchemaStr));
+    job.setOutputFormatClass(HCatOutputFormat.class);
+
+    return job.waitForCompletion(true) ? 0 : 1;
+
+
+  }
+  public static class Map extends Mapper<WritableComparable,
+          HCatRecord, Text, IntWritable> {
+    private final static IntWritable one = new IntWritable(1);
+    private Text word = new Text();
+    private HCatSchema inputSchema = null;
+
+    @Override
+    protected void map(WritableComparable key, HCatRecord value, Context context)
+        throws IOException, InterruptedException {
+      if (inputSchema == null) {
+        inputSchema =
+            HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(INPUT_SCHEMA));
+      }
+      String line = value.getString("line", inputSchema);
+      StringTokenizer tokenizer = new StringTokenizer(line);
+      while (tokenizer.hasMoreTokens()) {
+        word.set(tokenizer.nextToken());
+        context.write(word, one);
+      }
+    }
+  }
+
+  public static class Reduce extends Reducer<Text, IntWritable, WritableComparable, HCatRecord> {
+    private HCatSchema outputSchema = null;
+
+    @Override
+    protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws
+        IOException, InterruptedException {
+      if (outputSchema == null) {
+        outputSchema =
+            HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(OUTPUT_SCHEMA));
+      }
+      int sum = 0;
+      for (IntWritable i : values) {
+        sum += i.get();
+      }
+      HCatRecord output = new DefaultHCatRecord(2);
+      output.set("word", outputSchema, key);
+      output.set("count", outputSchema, sum);
+      context.write(null, output);
+    }
+  }
+
+  public static void main(String[] args) throws Exception {
+    int exitCode = ToolRunner.run(new HCatalogMR(), args);
+    System.exit(exitCode);
+  }
+}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh b/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh
new file mode 100755
index 0000000..8c9ab5e
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh
@@ -0,0 +1,64 @@
+#!/usr/bin/env bash
+
+############################################################################
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+############################################################################
+
+############################################################################
+# This script is used to generate the hadoop-*-api.report.json files in the
+# test/resources directory.  To use it, you will first need to download an
+# Apache binary distribution of Hadoop and set APACHE_HADOOP_DIR to the
+# directory where you untar that distribution.  You will then need to set
+# BIGTOP_HOME to the directory where your bigtop source is located.  Then
+# run this script for each of the jars you want to generate a report for.
+# The arguments passed to this script should be -p <outputdir> -j <jarfile>
+# where outputdir is the directory you'd like to write the report to and
+# jarfile is the full path of the jar to generate the report for.  Reports
+# should be generated for the following jars: hadoop-common, hadoop-hdfs,
+# hadoop-yarn-common, hadoop-yarn-client, hadoop-yarn-api, and
+# hadoop-mapreduce-client-core
+#
+# Example usage:
+# export APACHE_HADOOP_DIR=/tmp/hadoop-2.7.3
+# export BIGTOP_HOME=/home/me/git/bigtop
+# $BIGTOP_HOME/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh -j $HADOOP_HOME/share/hadoop/common/hadoop-common-2.7.3.jar -p $BIGTOP_HOME/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources
+#
+# The resulting reports should be committed to git.  This script only needs
+# to be run once per ODPi release.
+############################################################################
+
+
+if [ "x${APACHE_HADOOP_DIR}" = "x" ]
+then
+    echo "You must set APACHE_HADOOP_DIR to the directory you have placed the Apache Hadoop binary distribution in"
+    exit 1
+fi
+
+if [ "x${BIGTOP_HOME}" = "x" ]
+then
+    echo "You must set BIGTOP_HOME to the root directory for your bigtop source"
+    exit 1
+fi
+
+for jar in `find $BIGTOP_HOME/bigtop-tests/smoke-tests/odpi-runtime/build/libs/ -name \*.jar`
+do
+    CLASSPATH=$CLASSPATH:$jar
+done
+
+for jar in `find $APACHE_HADOOP_DIR -name \*.jar`
+do
+    CLASSPATH=$CLASSPATH:$jar
+done
+
+java -cp $CLASSPATH org.odpi.specs.runtime.hadoop.ApiExaminer "$@"
+

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy b/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
new file mode 100644
index 0000000..bc2a3b2
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
@@ -0,0 +1,275 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime
+
+import groovy.io.FileType
+import org.junit.Assert
+import org.apache.bigtop.itest.shell.*
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.Parameterized
+import org.junit.runners.Parameterized.Parameters
+
+import java.util.regex.Matcher
+import java.util.regex.Pattern
+
+/**
+ * Checks that the runtime environment matches expectations.
+ * Tests are constructed dynamically, using an external DSL to define
+ * - test name
+ * - test type
+ * - command to execute the test
+ * - expected pattern of the output
+ */
+@RunWith(Parameterized.class)
+public class TestSpecsRuntime {
+  private String testName
+  private String type
+  private Map arguments
+
+  private static ENV = System.getenv()
+
+  @Parameters(name="{0}")
+  public static Collection<Object[]> allTests() {
+    List<Object[]> specs = [];
+
+    config.specs.tests.each { test ->
+      specs.add([test.value.name, test.value.type, test.value.arguments] as Object[])
+    }
+    return specs
+  }
+
+  public TestSpecsRuntime (String testName, String type, Map arguments) {
+    this.testName = testName
+    this.type = type
+    this.arguments = arguments
+  }
+
+  public static final String testsList = System.properties['test.resources.dir'] ?:
+      "${System.properties['buildDir']}/resources/test"
+  def final static config = new ConfigSlurper().parse(new URL("file:${getTestConfigName()}"))
+
+  private static String getTestConfigName() {
+    return "$testsList/testRuntimeSpecConf.groovy";
+  }
+
+  private Map getEnvMap(String command) {
+    def envMap = [:]
+    Shell sh = new Shell()
+    def envvars = sh.exec(command).getOut()
+    if (sh.getRet() == 0) {
+      envvars.each {
+        def match = it =~ /(?<variable>[^=]+)='(?<value>[^']+)'$/
+        if ( match.matches() ) {
+          envMap[match.group('variable')] = match.group('value')
+        }
+      }
+    }
+    return envMap
+  }
+
+  private String getEnv(String name, String cmd) {
+    String value = ENV[name]
+    if (value == null) {
+       value = getEnvMap(cmd)[name]
+    }
+    return value
+  }
+
+  @Test
+  public void testAll() {
+    switch (type) {
+      case 'shell':
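+        // Run an arbitrary shell command and check its return code against
+        // the expected result (0 by default).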
+        Shell sh = new Shell()
+        def output = sh.exec(arguments['command']).getOut().join("\n")
+        int actualResult = sh.getRet()
+        int expectedResult = arguments['expectedResult'] ?: 0 // use 0 as default success code
+        Assert.assertTrue("${testName} fail: ${arguments['message']} - '${arguments['command']}' returned ${actualResult} instead of ${expectedResult}",
+            actualResult == expectedResult)
+        break
+
+      case 'envdir':
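+        // Check that an environment variable is set and, optionally, that its
+        // value matches a pattern and points at an existing directory.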
+        def var = arguments['variable']
+        def isPathRelative = arguments['relative']
+        def pathString = getEnv(var, arguments['envcmd'])
+        Assert.assertTrue("${testName} fail: environment variable ${var} does not exist", pathString != null )
+
+        if ( arguments['pattern'] ) {
+            Assert.assertTrue("${testName} fail: $pathString doesn't contain expected pattern",
+                pathString ==~ /${arguments['pattern']}/)
+        }
+
+        def pathFile = new File(pathString)
+        if ( isPathRelative ) {
+            Assert.assertFalse("${testName} fail: ${pathString} is not relative", pathFile.isAbsolute() )
+        } else {
+            if (!arguments['donotcheckexistance']) {
+              Assert.assertTrue("${testName} fail: ${pathString} does not exist", pathFile.exists() )
+              Assert.assertTrue("${testName} fail: ${pathString} is not directory", pathFile.isDirectory() )
+            }
+        }
+        break
+
+      case 'dirstruct':
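+        // Walk a directory tree (rooted at an env variable) and check that
+        // every pattern in the reference list matches some file; missing
+        // entries fail the test.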
+        def expectedFiles = []
+        new File("${testsList}", "${arguments['referenceList']}").eachLine { line ->
+           expectedFiles << ~line
+        }
+        def baseDirEnv = getEnv(arguments['baseDirEnv'], arguments['envcmd'])
+        Assert.assertNotNull("${baseDirEnv} has to be set for the test to continue",
+          baseDirEnv)
+        def root = new File(baseDirEnv)
+        def actualFiles = []
+        def missingFiles = []
+        if ( ! root.exists() ) {
+          Assert.fail("${testName} fail: ${baseDirEnv} does not exist!");
+        }
+
+        root.eachFileRecurse(FileType.ANY) { file ->
+          def relPath = new File( root.toURI().relativize( file.toURI() ).toString() ).path
+          actualFiles << relPath
+        }
+
+        expectedFiles.each { wantFile ->
+          def ok = false
+          for (def x : actualFiles) {
+            if (x =~ wantFile) {
+              ok = true
+              break
+            }
+          }
+          if (!ok) {
+            missingFiles << wantFile
+          }
+        }
+
+        Assert.assertTrue("${testName} fail: Directory structure for ${baseDirEnv} does not match reference. Missing files: ${missingFiles} ",
+          missingFiles.size() == 0)
+        break
+
+      case 'dircontent':
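+        // Compare the files directly under a directory against a reference
+        // list of patterns; both missing and extra files fail the test.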
+        def expectedFiles = []
+        new File("${testsList}", "${arguments['referenceList']}").eachLine { line ->
+          expectedFiles << ~line
+        }
+
+        def baseDir = getEnv(arguments['baseDirEnv'], arguments['envcmd'])
+        def subDir = arguments['subDir']
+        if (!subDir && arguments['subDirEnv']) {
+          subDir = getEnv(arguments['subDirEnv'], arguments['envcmd'])
+        }
+
+        def dir = null
+        if (subDir) {
+          dir = new File(baseDir, subDir)
+        } else {
+          dir = new File(baseDir)
+        }
+        Assert.assertNotNull("Directory has to be set for the test to continue", dir)
+
+        def actualFiles = []
+        if (dir.exists()) {
+          dir.eachFile FileType.FILES, { file ->
+            def relPath = new File( dir.toURI().relativize( file.toURI() ).toString() ).path
+            actualFiles << relPath
+          }
+        }
+
+        def missingList = []
+        for (def wantFile : expectedFiles) {
+          def ok = false
+          for (def haveFile : actualFiles) {
+            if (haveFile =~ wantFile) {
+              ok = true
+              break
+            }
+          }
+          if (! ok) {
+            missingList << wantFile
+          }
+        }
+
+        def extraList = []
+        for (def haveFile : actualFiles) {
+          def ok = false
+          for (def wantFile : expectedFiles) {
+            if (haveFile =~ wantFile) {
+              ok = true
+              break
+            }
+          }
+          if (! ok) {
+            extraList << haveFile
+          }
+        }
+
+        Assert.assertTrue("${testName} fail: Directory content for ${dir.path} does not match reference. Missing files: ${missingList}. Extra files: ${extraList}",
+           missingList.size() == 0 && extraList.size() == 0)
+        break
+      case 'hadoop_tools':
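+        // Verify HADOOP_TOOLS_PATH is set to an absolute path and that the
+        // whole directory is not wildcarded into hadoop's classpath.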
+        def toolsPathStr = getEnv("HADOOP_TOOLS_PATH", "hadoop envvars")
+        Assert.assertNotNull("${testName} fail: HADOOP_TOOLS_PATH environment variable should be set", toolsPathStr)
+
+        def toolsPath = new File(toolsPathStr)
+        Assert.assertTrue("${testName} fail: HADOOP_TOOLS_PATH must be an absolute path.", toolsPath.isAbsolute())
+
+        Shell sh = new Shell()
+        def classPath = sh.exec("hadoop classpath").getOut().join("\n")
+        Assert.assertTrue("${testName} fail: Failed to retrieve hadoop's classpath", sh.getRet()==0)
+
+        Assert.assertFalse("${testName} fail: The entire '${toolsPath}' path should not be included in Hadoop's classpath",
+          classPath.split(File.pathSeparator).any {
+            new File(it).getCanonicalPath() =~ /^${toolsPath}\/?\*/
+          }
+        )
+        break
+      case 'api_examination':
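+        // Find the single non-test jar for the component and run ApiExaminer
+        // to compare its public API against the stored report.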
+        def basedir = getEnv(arguments['baseDirEnv'], arguments['envcmd'])
+        def libdir = getEnv(arguments['libDir'], arguments['envcmd'])
+
+        def dir = new File(basedir + "/" + libdir)
+        Assert.assertTrue("Expected " + dir.getPath() + " to be a directory", dir.isDirectory())
+        def pattern = Pattern.compile(arguments['jar'] + "-[0-9]+.*\\.jar")
+        def String[] jars = dir.list(new FilenameFilter() {
+          @Override
+          boolean accept(File d, String name) {
+            Matcher matcher = pattern.matcher(name)
+            return (matcher.matches() && !name.contains("test"))
+          }
+        })
+        Assert.assertEquals("Expected only one jar, but got " + jars.join(", "), 1, jars.length)
+        def jar = dir.getAbsolutePath() + "/" + jars[0]
+
+        def examinerJar = System.properties['odpi.test.hive.hcat.job.jar']
+        def resourceFile = System.properties['test.resources.dir']+ "/" + arguments['resourceFile']
+        Shell sh = new Shell()
+        def results = sh.exec("hadoop jar " + examinerJar + " org.odpi.specs.runtime.hadoop.ApiExaminer -c " + resourceFile + " -j " + jar).getErr()
+        int rc = sh.getRet()
+        Assert.assertEquals("Expected command to succeed, but got return code " + rc, 0, rc)
+        if (results.size() > 0) {
+          System.out.println("Received report for jar " + arguments['jar'] + results.join("\n"))
+        }
+        break;
+
+
+      default:
+        break
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
new file mode 100644
index 0000000..3e56224
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hive;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.exec.CommandLine;
+import org.apache.commons.exec.DefaultExecuteResultHandler;
+import org.apache.commons.exec.DefaultExecutor;
+import org.apache.commons.exec.ExecuteException;
+import org.apache.commons.exec.ExecuteWatchdog;
+import org.apache.commons.exec.Executor;
+import org.apache.commons.exec.PumpStreamHandler;
+import org.apache.commons.exec.environment.EnvironmentUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+public class HiveHelper {
+	
+	private static final Log LOG = LogFactory.getLog(HiveHelper.class.getName());
+
+	public static Map<String, String> execCommand(CommandLine commandline) {
+		return execCommand(commandline, null);
+	}
+
+	public static Map<String, String> execCommand(CommandLine commandline,
+																								Map<String, String> envVars) {
+		
+		System.out.println("Executing command:");
+		System.out.println(commandline.toString());
+		Map<String, String> env = null;
+		Map<String, String> entry = new HashMap<String, String>();
+		try {
+			env = EnvironmentUtils.getProcEnvironment();
+		} catch (IOException e1) {
+			LOG.debug("Failed to get process environment: "+ e1.getMessage());
+			e1.printStackTrace();
+		}
+		if (envVars != null) {
+			for (String key : envVars.keySet()) {
+				env.put(key, envVars.get(key));
+			}
+		}
+
+		DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
+		ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+		PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream);
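+		// Kill the command if it runs longer than 10 minutes (60 * 10000 ms)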
+		ExecuteWatchdog watchdog = new ExecuteWatchdog(60*10000);
+		Executor executor = new DefaultExecutor();
+		executor.setExitValue(1);
+		executor.setWatchdog(watchdog);
+		executor.setStreamHandler(streamHandler);
+		try {
+			executor.execute(commandline, env, resultHandler);
+		} catch (ExecuteException | IOException e) {
+			LOG.debug("Failed to execute command with exit value: "+ String.valueOf(resultHandler.getExitValue()));
+			LOG.debug("outputStream: "+ outputStream.toString());
+			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+			entry.put("outputStream", outputStream.toString() + e.getMessage());
+			e.printStackTrace();
+			return entry;
+		}
+		
+		try {
+			resultHandler.waitFor();
+			/*System.out.println("Command output: "+outputStream.toString());*/
+			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+			entry.put("outputStream", outputStream.toString());
+			return entry;
+		} catch (InterruptedException e) {
+			/*System.out.println("Command output: "+outputStream.toString());*/
+			LOG.debug("exitValue: "+ String.valueOf(resultHandler.getExitValue()));
+			LOG.debug("outputStream: "+ outputStream.toString());
+			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+			entry.put("outputStream", outputStream.toString());
+			e.printStackTrace();		
+			return entry;
+		}
+	}
+	
+	protected static String getProperty(String property, String description) {
+		String val = System.getProperty(property);
+		if (val == null) {
+			throw new RuntimeException("You must set the property " + property + " with " +
+				description);
+		}
+		LOG.debug(description + " is " + val);
+		return val;
+	 }
+	
+
+}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
new file mode 100644
index 0000000..7512dab
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hive;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.util.Properties;
+
+public class JdbcConnector {
+  private static final Log LOG = LogFactory.getLog(JdbcConnector.class.getName());
+
+  protected static final String URL = "odpi.test.hive.jdbc.url";
+  protected static final String USER = "odpi.test.hive.jdbc.user";
+  protected static final String PASSWD = "odpi.test.hive.jdbc.password";
+  protected static final String LOCATION = "odpi.test.hive.location";
+  protected static final String METASTORE_URL = "odpi.test.hive.metastore.url";
+  protected static final String TEST_THRIFT = "odpi.test.hive.thrift.test";
+  protected static final String TEST_HCATALOG = "odpi.test.hive.hcatalog.test";
+  protected static final String HIVE_CONF_DIR = "odpi.test.hive.conf.dir";
+  protected static final String HADOOP_CONF_DIR = "odpi.test.hadoop.conf.dir";
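+  // All of the above are passed to the tests as JVM system properties, e.g.
+  //   -Dodpi.test.hive.jdbc.url=jdbc:hive2://localhost:10000  (illustrative value)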
+
+  protected static Connection conn;
+
+  @BeforeClass
+  public static void connectToJdbc() throws SQLException {
+    // The JDBC URL, user, and password are passed in as system properties.
+    String jdbcUrl = getProperty(URL, "the JDBC URL");
+    String jdbcUser = getProperty(USER, "the JDBC user name");
+    String jdbcPasswd = getProperty(PASSWD, "the JDBC password");
+
+    Properties props = new Properties();
+    props.put("user", jdbcUser);
+    if (!jdbcPasswd.equals("")) props.put("password", jdbcPasswd);
+    conn = DriverManager.getConnection(jdbcUrl, props);
+  }
+
+  @AfterClass
+  public static void closeJdbc() throws SQLException {
+    if (conn != null) conn.close();
+  }
+
+  protected static String getProperty(String property, String description) {
+    String val = System.getProperty(property);
+    if (val == null) {
+      throw new RuntimeException("You must set the property " + property + " with " +
+          description);
+    }
+    LOG.debug(description + " is " + val);
+    return val;
+  }
+
+  protected static boolean testActive(String property, String description) {
+    String val = System.getProperty(property, "true");
+    LOG.debug(description + " is " + val);
+    return Boolean.valueOf(val);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
new file mode 100644
index 0000000..578621a
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
@@ -0,0 +1,201 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hive;
+
+import org.apache.commons.exec.CommandLine;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.FileNotFoundException;
+import java.io.PrintWriter;
+import java.util.Map;
+
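+/**
+ * Smoke tests for the Beeline CLI.  Each test drives the beeline command
+ * found on the PATH and inspects its console output, covering --help, -u,
+ * -e, -f, -i, --hivevar, --fastConnect, --verbose and --showHeader as well
+ * as the interactive !connect command.
+ */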
+public class TestBeeline {
+	
+	public static final Log LOG = LogFactory.getLog(TestBeeline.class.getName());
+	
+	private static final String URL = "odpi.test.hive.jdbc.url";
+	private static final String USER = "odpi.test.hive.jdbc.user";
+	private static final String PASSWD = "odpi.test.hive.jdbc.password";
+	
+	private static Map<String, String> results;
+	private static String beelineUrl; 
+	private static String beelineUser;
+	private static String beelinePasswd;
+	
+	// Base beeline command; the user name and password are appended in initialSetup() only when provided.
+	private static CommandLine beelineBaseCommand = new CommandLine("beeline");
+
+	@BeforeClass
+	public static void initialSetup(){
+		TestBeeline.beelineUrl = System.getProperty(URL);
+		TestBeeline.beelineUser = System.getProperty(USER);
+		TestBeeline.beelinePasswd = System.getProperty(PASSWD);
+
+		if (beelineUser != null && !beelineUser.isEmpty() && beelinePasswd != null && !beelinePasswd.isEmpty()) {
+			beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd);
+		} else if (beelineUser != null && !beelineUser.isEmpty()) {
+			beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser);
+		} else {
+			beelineBaseCommand.addArgument("-u").addArgument(beelineUrl);
+		}
+		LOG.info("URL is " + beelineUrl); 
+		LOG.info("User is " + beelineUser);
+		LOG.info("Passwd is " + beelinePasswd); 
+		LOG.info("Passwd is null " + (beelinePasswd == null));
+	}
+
+	@Test
+	public void checkBeeline() {
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline -u FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+	}
+	
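+	// !connect cannot be passed as a command-line option, so write it to a
+	// small script file and replay it through beeline -f.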
+	@Test
+	public void checkBeelineConnect(){
+		try(PrintWriter out = new PrintWriter("connect.url")){ out.println("!connect " + beelineUrl+" "+beelineUser+" "+beelinePasswd); out.println("!quit"); } 
+		catch (FileNotFoundException e1) {
+			e1.printStackTrace();
+		}
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -f connect.url",false));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline !connect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception") );  
+	}
+	
+	@Test
+	public void checkBeelineHelp(){
+		results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("--help"));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline --help FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("display this message" ) && consoleMsg.contains("usage: java org.apache.hive.cli.beeline.beeline") && !consoleMsg.contains("exception"));
+	}
+	
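+	// Create the test database fresh (dropping any leftover copy first) so the
+	// test is repeatable, then confirm it appears in the SHOW DATABASES output.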
+	@Test
+	public void checkBeelineQueryExecFromCmdLine(){
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive;"));
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
+		}
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline -e FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+	}
+	
+	@Test
+	public void checkBeelineQueryExecFromFile() throws FileNotFoundException{
+		
+		try(PrintWriter out = new PrintWriter("beeline-f1.sql")){ out.println("SHOW DATABASES;"); }
+		try(PrintWriter out = new PrintWriter("beeline-f2.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("beeline-f3.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("beeline-f4.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); }
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false));
+		
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f2.sql",false));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f3.sql",false));
+		}
+		
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false));
+
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline -f FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f4.sql",false));	
+	}
+	
+	@Test
+	public void checkBeelineInitFile() throws FileNotFoundException{
+
+		try(PrintWriter out = new PrintWriter("beeline-i1.sql")){ out.println("SHOW DATABASES;"); }
+		try(PrintWriter out = new PrintWriter("beeline-i2.sql")){ out.println("CREATE DATABASE odpi_runtime_beeline_init;"); }
+		try(PrintWriter out = new PrintWriter("beeline-i3.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); out.println("CREATE DATABASE odpi_runtime_beeline_init;"); }
+		try(PrintWriter out = new PrintWriter("beeline-i4.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); }
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false));
+	
+		if(!results.get("outputStream").contains("odpi_runtime_beeline_init")){
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i2.sql",false));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i3.sql",false));
+		}
+		
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline -i FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_beeline_init") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i4.sql",false));	
+	}
+	
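+	// The ${db} placeholder in the generated scripts is filled in by the
+	// --hivevar db=... argument, which is what this test verifies.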
+	@Test
+	public void checkBeelineHiveVar() throws FileNotFoundException{
+
+		try(PrintWriter out = new PrintWriter("beeline-hv1.sql")){ out.println("SHOW DATABASES;"); }
+		try(PrintWriter out = new PrintWriter("beeline-hv2.sql")){ out.println("CREATE DATABASE ${db};"); }
+		try(PrintWriter out = new PrintWriter("beeline-hv3.sql")){ out.println("DROP DATABASE ${db};"); out.println("CREATE DATABASE ${db};"); }
+		try(PrintWriter out = new PrintWriter("beeline-hv4.sql")){ out.println("DROP DATABASE ${db};"); }
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false));
+	
+		if(!results.get("outputStream").contains("odpi_runtime_beeline_hivevar")){
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv2.sql",false));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv3.sql",false));
+		}
+		
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline --hivevar FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_beeline_hivevar") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv4.sql",false));		 
+	}
+	
+	@Test
+	public void checkBeelineFastConnect(){
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--fastConnect=false"));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline --fastConnect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("set fastconnect to true to skip"));
+	}
+
+	@Test
+	public void checkBeelineVerbose(){
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--verbose=true"));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline --verbose FAILED." +results.get("outputStream"), true, consoleMsg.contains("issuing: !connect jdbc:hive2:") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+	}
+	
+	@Test
+	public void checkBeelineShowHeader(){
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--showHeader=false").addArgument("-e").addArgument("SHOW DATABASES;"));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline --showHeader FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("default")&&!consoleMsg.contains("database_name") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+	}
+
+	@AfterClass
+	public static void cleanup() throws FileNotFoundException {
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf beeline*.sql", false));
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf connect.url", false));
+	}
+}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
new file mode 100644
index 0000000..2b70909
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
@@ -0,0 +1,213 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hive;
+
+import java.io.FileNotFoundException;
+import java.io.PrintWriter;
+import java.util.Map;
+
+import org.apache.commons.exec.CommandLine;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.AfterClass;
+import org.junit.Assert;
+
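+/**
+ * Smoke tests for the Hive CLI.  Every invocation points
+ * javax.jdo.option.ConnectionURL at a local embedded Derby metastore so the
+ * tests do not depend on an external metastore service.
+ */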
+public class TestCLI {
+	
+	static Map<String, String> results;
+	static String db = "javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=odpi_metastore_db;create=true";
+	
+	@BeforeClass
+	public static void setup(){
+		
+		results = HiveHelper.execCommand(new CommandLine("which").addArgument("hive"));
+		Assert.assertEquals("Hive is not in the current path.", 0, Integer.parseInt(results.get("exitValue")));
+	}
+	
+	@Test
+	public void help(){		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-H"));
+		Assert.assertEquals("Error in executing 'hive -H'", 2, Integer.parseInt(results.get("exitValue")));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--help"));
+		Assert.assertEquals("Error in executing 'hive --help'", 0, Integer.parseInt(results.get("exitValue")));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-U"));
+		Assert.assertEquals("Unrecognized option should exit 1.", 1, Integer.parseInt(results.get("exitValue")));
+	}
+	 
+	@Test
+	public void sqlFromCmdLine(){
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+		}
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+	}
+	
+	@Test
+	public void sqlFromFiles() throws FileNotFoundException{
+		try(PrintWriter out = new PrintWriter("hive-f1.sql")){ out.println("SHOW DATABASES;"); }
+		try(PrintWriter out = new PrintWriter("hive-f2.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("hive-f3.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("hive-f4.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); }
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f1.sql").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f2.sql").addArgument("--hiveconf").addArgument(db));
+			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f3.sql").addArgument("--hiveconf").addArgument(db));
+			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+		}
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f4.sql").addArgument("--hiveconf").addArgument(db));
+	}
+	
+	@Test
+	public void silent() {
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-S").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("-S option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--silent").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("--silent option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
+	}
+	
+	@Test
+	public void verbose(){
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-v").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("-v option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--verbose").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("--verbose option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));		
+	}
+	
+	@Test
+	public void initialization() throws FileNotFoundException{
+		try(PrintWriter out = new PrintWriter("hive-init1.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("hive-init2.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init1.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", 0, Integer.parseInt(results.get("exitValue")));
+			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init2.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		}
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+	}
+	
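+	// --database should switch the session to the named database; pointing it
+	// at a database that does not exist is expected to fail with exit code 88.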
+	@Test
+	public void database(){
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		}
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive_1234").addArgument("-e").addArgument("CREATE TABLE odpi ( MYID INT );").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("Non-existent database returned with wrong exit code: "+Integer.parseInt(results.get("exitValue")), 88, Integer.parseInt(results.get("exitValue")));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("CREATE TABLE odpi ( MYID INT );").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("DESCRIBE odpi").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("Failed to get expected column after creating odpi table using --database argument.", true, results.get("outputStream").contains("myid"));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("DROP TABLE odpi").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+	}
+	
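+	// Both --hiveconf and the single-dash -hiveconf spelling should be accepted;
+	// setting hive.root.logger=INFO,console makes the parser's log line show up
+	// on the console so it can be asserted on.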
+	@Test
+	public void hiveConf(){
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("The --hiveconf option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("The -hiveconf variant option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
+	}
+	
+	@Test
+	public void variableSubstitution() throws FileNotFoundException{
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		}
+		try(PrintWriter out = new PrintWriter("hive-define.sql")){ out.println("show ${A};"); out.println("quit;"); }
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive -d A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false));		
+		Assert.assertEquals("The hive -d A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+		Assert.assertEquals("The hive -d A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --define A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false));		
+		Assert.assertEquals("The hive --define A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+		Assert.assertEquals("The hive --define A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+	}
+	
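+	// --hivevar values substitute into bare ${A} references, while --hiveconf
+	// values are only visible through the ${hiveconf:A} namespace.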
+	@Test
+	public void hiveVar() throws FileNotFoundException{
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		}
+		try(PrintWriter out = new PrintWriter("hive-var.sql")){ out.println("show ${A};"); out.println("quit;"); }
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hivevar A=DATABASES --hiveconf '"+db+"' < hive-var.sql", false));		
+		Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+		Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		
+		try(PrintWriter out = new PrintWriter("hiveconf-var.sql")){ out.println("show ${hiveconf:A};"); out.println("quit;"); }
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hiveconf A=DATABASES --hiveconf '"+db+"' < hiveconf-var.sql", false));		
+		Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+		Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+	}
+	
+	@AfterClass
+	public static void cleanup(){
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-f*.sql", false));
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-init*.sql", false));
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-define.sql", false));
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-var.sql", false));
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hiveconf-var.sql", false));
+	}
+	 
+}