Posted to commits@bigtop.apache.org by rv...@apache.org on 2017/03/23 17:28:09 UTC

[38/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy b/bigtop-tests/spec-tests/runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
deleted file mode 100644
index bc2a3b2..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
+++ /dev/null
@@ -1,275 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.odpi.specs.runtime
-
-import groovy.io.FileType
-import org.junit.Assert
-import org.apache.bigtop.itest.shell.*
-import org.junit.Test
-import org.junit.runner.RunWith
-import org.junit.runners.Parameterized
-import org.junit.runners.Parameterized.Parameters
-
-import java.util.regex.Matcher
-import java.util.regex.Pattern
-
-/**
- * Checks that the expected runtime environment is present.
- * Tests are constructed dynamically, using an external DSL to define
- * - test name
- * - test type
- * - command to execute the test
- * - expected pattern of the output
- */
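-// A hypothetical entry from testRuntimeSpecConf.groovy, showing the shape this
-// class expects (ConfigSlurper syntax; names and values are illustrative only):
-//
-//   specs {
-//     tests {
-//       'HADOOP_EJH1' {
-//         name = 'HADOOP_EJH1'
-//         type = 'envdir'
-//         arguments {
-//           envcmd = 'hadoop envvars'
-//           variable = 'JAVA_HOME'
-//         }
-//       }
-//     }
-//   }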
-@RunWith(Parameterized.class)
-public class TestSpecsRuntime {
-  private String testName
-  private String type
-  private Map arguments
-
-  private static ENV = System.getenv()
-
-  @Parameters(name="{0}")
-  public static Collection<Object[]> allTests() {
-    List<Object[]> specs = [];
-
-    config.specs.tests.each { test ->
-      specs.add([test.value.name, test.value.type, test.value.arguments] as Object[])
-    }
-    return specs
-  }
-
-  public TestSpecsRuntime (String testName, String type, Map arguments) {
-    this.testName = testName
-    this.type = type
-    this.arguments = arguments
-  }
-
-  public static final String testsList = System.properties['test.resources.dir'] ?:
-      "${System.properties['buildDir']}/resources/test"
-  def final static config = new ConfigSlurper().parse(new URL("file:${getTestConfigName()}"))
-
-  private static String getTestConfigName() {
-    return "$testsList/testRuntimeSpecConf.groovy";
-  }
-
-  private Map getEnvMap(String command) {
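-    // `command` is expected to print one VAR='value' pair per line (e.g. the
-    // output of "hadoop envvars"); non-matching lines are ignored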
-    def envMap = [:]
-    Shell sh = new Shell()
-    def envvars = sh.exec(command).getOut()
-    if (sh.getRet() == 0) {
-      envvars.each {
-        def match = it =~ /(?<variable>[^=]+)='(?<value>[^']+)'$/
-        if ( match.matches() ) {
-          envMap[match.group('variable')] = match.group('value')
-        }
-      }
-    }
-    return envMap
-  }
-
-  private String getEnv(String name, String cmd) {
-    String value = ENV[name]
-    if (value == null) {
-       value = getEnvMap(cmd)[name]
-    }
-    return value
-  }
-
-  @Test
-  public void testAll() {
-    switch (type) {
-      case 'shell':
-        Shell sh = new Shell()
-        def output = sh.exec(arguments['command']).getOut().join("\n")
-        int actualResult = sh.getRet()
-        int expectedResult = arguments['expectedResult'] ?: 0 // 0 is the default success code
-        Assert.assertTrue("${testName} fail: ${arguments['message']} - '${arguments['command']}' returned ${actualResult} instead of ${expectedResult}",
-            actualResult == expectedResult)
-        break
-
-      case 'envdir':
-        def var = arguments['variable']
-        def isPathRelative = arguments['relative']
-        def pathString = getEnv(var, arguments['envcmd'])
-        Assert.assertTrue("${testName} fail: environment variable ${var} does not exist", pathString != null )
-
-        if ( arguments['pattern'] ) {
-            Assert.assertTrue("${testName} fail: $pathString doesn't contain expected pattern",
-                pathString ==~ /${arguments['pattern']}/)
-        }
-
-        def pathFile = new File(pathString)
-        if ( isPathRelative ) {
-            Assert.assertFalse("${testName} fail: ${pathString} is not relative", pathFile.isAbsolute() )
-        } else {
-            if (!arguments['donotcheckexistance']) {
-              Assert.assertTrue("${testName} fail: ${pathString} does not exist", pathFile.exists() )
-              Assert.assertTrue("${testName} fail: ${pathString} is not directory", pathFile.isDirectory() )
-            }
-        }
-        break
-
-      case 'dirstruct':
-        def expectedFiles = []
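-        // each line of the reference list is compiled as a regex (note the ~)
-        // and matched against the relative paths found under the base directory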
-        new File("${testsList}", "${arguments['referenceList']}").eachLine { line ->
-           expectedFiles << ~line
-        }
-        def baseDirEnv = getEnv(arguments['baseDirEnv'], arguments['envcmd'])
-        Assert.assertNotNull("${baseDirEnv} has to be set for the test to continue",
-          baseDirEnv)
-        def root = new File(baseDirEnv)
-        def actualFiles = []
-        def missingFiles = []
-        if ( ! root.exists() ) {
-          Assert.fail("${testName} fail: ${baseDirEnv} does not exist!")
-        }
-
-        root.eachFileRecurse(FileType.ANY) { file ->
-          def relPath = new File( root.toURI().relativize( file.toURI() ).toString() ).path
-          actualFiles << relPath
-        }
-
-        expectedFiles.each { wantFile ->
-          def ok = false
-          for (def x : actualFiles) {
-            if (x =~ wantFile) {
-              ok = true
-              break
-            }
-          }
-          if (!ok) {
-            missingFiles << wantFile
-          }
-        }
-
-        Assert.assertTrue("${testName} fail: Directory structure for ${baseDirEnv} does not match reference. Missing files: ${missingFiles} ",
-          missingFiles.size() == 0)
-        break
-
-      case 'dircontent':
-        def expectedFiles = []
-        new File("${testsList}", "${arguments['referenceList']}").eachLine { line ->
-          expectedFiles << ~line
-        }
-
-        def baseDir = getEnv(arguments['baseDirEnv'], arguments['envcmd'])
-        Assert.assertNotNull("${arguments['baseDirEnv']} has to be set for the test to continue", baseDir)
-        def subDir = arguments['subDir']
-        if (!subDir && arguments['subDirEnv']) {
-          subDir = getEnv(arguments['subDirEnv'], arguments['envcmd'])
-        }
-
-        def dir = null
-        if (subDir) {
-          dir = new File(baseDir, subDir)
-        } else {
-          dir = new File(baseDir)
-        }
-        Assert.assertNotNull("Directory has to be set for the test to continue", dir)
-
-        def actualFiles = []
-        if (dir.exists()) {
-          dir.eachFile FileType.FILES, { file ->
-            def relPath = new File( dir.toURI().relativize( file.toURI() ).toString() ).path
-            actualFiles << relPath
-          }
-        }
-
-        def missingList = []
-        for (def wantFile : expectedFiles) {
-          def ok = false
-          for (def haveFile : actualFiles) {
-            if (haveFile =~ wantFile) {
-              ok = true
-              break
-            }
-          }
-          if (! ok) {
-            missingList << wantFile
-          }
-        }
-
-        def extraList = []
-        for (def haveFile : actualFiles) {
-          def ok = false
-          for (def wantFile : expectedFiles) {
-            if (haveFile =~ wantFile) {
-              ok = true
-              break
-            }
-          }
-          if (! ok) {
-            extraList << haveFile
-          }
-        }
-
-        Assert.assertTrue("${testName} fail: Directory content for ${dir.path} does not match reference. Missing files: ${missingList}. Extra files: ${extraList}",
-           missingList.size() == 0 && extraList.size() == 0)
-        break
-      case 'hadoop_tools':
-        def toolsPathStr = getEnv("HADOOP_TOOLS_PATH", "hadoop envvars")
-        Assert.assertNotNull("${testName} fail: HADOOP_TOOLS_PATH environment variable should be set", toolsPathStr)
-
-        def toolsPath = new File(toolsPathStr)
-        Assert.assertTrue("${testName} fail: HADOOP_TOOLS_PATH must be an absolute path.", toolsPath.isAbsolute())
-
-        Shell sh = new Shell()
-        def classPath = sh.exec("hadoop classpath").getOut().join("\n")
-        Assert.assertTrue("${testName} fail: Failed to retrieve hadoop's classpath", sh.getRet()==0)
-
-        Assert.assertFalse("${testName} fail: The enire '${toolsPath}' path should not be included in the hadoop's classpath",
-          classPath.split(File.pathSeparator).any {
-            new File(it).getCanonicalPath() =~ /^${toolsPath}\/?\*/
-          }
-        )
-        break
-      case 'api_examination':
-        def basedir = getEnv(arguments['baseDirEnv'], arguments['envcmd'])
-        def libdir = getEnv(arguments['libDir'], arguments['envcmd'])
-
-        def dir = new File(basedir + "/" + libdir)
-        Assert.assertTrue("Expected " + dir.getPath() + " to be a directory", dir.isDirectory())
-        def pattern = Pattern.compile(arguments['jar'] + "-[0-9]+.*\\.jar")
-        String[] jars = dir.list(new FilenameFilter() {
-          @Override
-          boolean accept(File d, String name) {
-            Matcher matcher = pattern.matcher(name)
-            return (matcher.matches() && !name.contains("test"))
-          }
-        })
-        Assert.assertEquals("Expected only one jar, but got " + jars.join(", "), 1, jars.length)
-        def jar = dir.getAbsolutePath() + "/" + jars[0]
-
-        def examinerJar = System.properties['odpi.test.hive.hcat.job.jar']
-        def resourceFile = System.properties['test.resources.dir']+ "/" + arguments['resourceFile']
-        Shell sh = new Shell()
-        def results = sh.exec("hadoop jar " + examinerJar + " org.odpi.specs.runtime.hadoop.ApiExaminer -c " + resourceFile + " -j " + jar).getErr()
-        int rc = sh.getRet()
-        Assert.assertEquals("Expected command to succeed, but got return code " + rc, 0, rc)
-        if (results.size() > 0) {
-          System.out.println("Received report for jar " + arguments['jar'] + results.join("\n"))
-        }
-        break
-
-      default:
-        break
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
deleted file mode 100644
index 3e56224..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.odpi.specs.runtime.hive;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.commons.exec.CommandLine;
-import org.apache.commons.exec.DefaultExecuteResultHandler;
-import org.apache.commons.exec.DefaultExecutor;
-import org.apache.commons.exec.ExecuteException;
-import org.apache.commons.exec.ExecuteWatchdog;
-import org.apache.commons.exec.Executor;
-import org.apache.commons.exec.PumpStreamHandler;
-import org.apache.commons.exec.environment.EnvironmentUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-public class HiveHelper {
-	
-	private static final Log LOG = LogFactory.getLog(HiveHelper.class.getName());
-
-	public static Map<String, String> execCommand(CommandLine commandline) {
-		return execCommand(commandline, null);
-	}
-
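-	/**
-	 * Runs the given command line, optionally with extra environment variables
-	 * layered over the current process environment, and returns a map with two
-	 * keys: "exitValue" (the numeric exit code as a string) and "outputStream"
-	 * (the combined stdout/stderr captured from the process).
-	 */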
-	public static Map<String, String> execCommand(CommandLine commandline,
-																								Map<String, String> envVars) {
-		
-		System.out.println("Executing command:");
-		System.out.println(commandline.toString());
-		Map<String, String> env = null;
-		Map<String, String> entry = new HashMap<String, String>();
-		try {
-			env = EnvironmentUtils.getProcEnvironment();
-		} catch (IOException e1) {
-			LOG.debug("Failed to get process environment: "+ e1.getMessage());
-			e1.printStackTrace();
-			env = new HashMap<String, String>(); // fall back to an empty environment so the merge below cannot NPE
-		}
-		if (envVars != null) {
-			for (String key : envVars.keySet()) {
-				env.put(key, envVars.get(key));
-			}
-		}
-
-		DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
-		ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
-		PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream);
-		ExecuteWatchdog watchdog = new ExecuteWatchdog(60*10000);
-		Executor executor = new DefaultExecutor();
-		executor.setExitValue(1);
-		executor.setWatchdog(watchdog);
-		executor.setStreamHandler(streamHandler);
-		try {
-			executor.execute(commandline, env, resultHandler);
-		} catch (ExecuteException | IOException e) {
-			LOG.debug("Failed to execute command with exit value: "+ String.valueOf(resultHandler.getExitValue()));
-			LOG.debug("outputStream: "+ outputStream.toString());
-			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
-			entry.put("outputStream", outputStream.toString() + e.getMessage());
-			e.printStackTrace();
-			return entry;
-		}
-		
-		try {
-			resultHandler.waitFor();
-			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
-			entry.put("outputStream", outputStream.toString());
-			return entry;
-		} catch (InterruptedException e) {
-			LOG.debug("exitValue: "+ String.valueOf(resultHandler.getExitValue()));
-			LOG.debug("outputStream: "+ outputStream.toString());
-			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
-			entry.put("outputStream", outputStream.toString());
-			e.printStackTrace();
-			return entry;
-		}
-	}
-	
-	protected static String getProperty(String property, String description) {
-		String val = System.getProperty(property);
-		if (val == null) {
-			throw new RuntimeException("You must set the property " + property + " with " +
-				description);
-		}
-		LOG.debug(description + " is " + val);
-		return val;
-	 }
-	
-
-}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
deleted file mode 100644
index 7512dab..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.odpi.specs.runtime.hive;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-import java.util.Properties;
-
-public class JdbcConnector {
-  private static final Log LOG = LogFactory.getLog(JdbcConnector.class.getName());
-
-  protected static final String URL = "odpi.test.hive.jdbc.url";
-  protected static final String USER = "odpi.test.hive.jdbc.user";
-  protected static final String PASSWD = "odpi.test.hive.jdbc.password";
-  protected static final String LOCATION = "odpi.test.hive.location";
-  protected static final String METASTORE_URL = "odpi.test.hive.metastore.url";
-  protected static final String TEST_THRIFT = "odpi.test.hive.thrift.test";
-  protected static final String TEST_HCATALOG = "odpi.test.hive.hcatalog.test";
-  protected static final String HIVE_CONF_DIR = "odpi.test.hive.conf.dir";
-  protected static final String HADOOP_CONF_DIR = "odpi.test.hadoop.conf.dir";
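-  // All of the above are JVM system properties, e.g. (values illustrative):
-  //   -Dodpi.test.hive.jdbc.url=jdbc:hive2://localhost:10000
-  //   -Dodpi.test.hive.jdbc.user=hive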
-
-  protected static Connection conn;
-
-  @BeforeClass
-  public static void connectToJdbc() throws SQLException {
-    // Assume the JDBC connection details have been supplied as system properties.
-    String jdbcUrl = getProperty(URL, "the JDBC URL");
-    String jdbcUser = getProperty(USER, "the JDBC user name");
-    String jdbcPasswd = getProperty(PASSWD, "the JDBC password");
-
-    Properties props = new Properties();
-    props.put("user", jdbcUser);
-    if (!jdbcPasswd.equals("")) props.put("password", jdbcPasswd);
-    conn = DriverManager.getConnection(jdbcUrl, props);
-  }
-
-  @AfterClass
-  public static void closeJdbc() throws SQLException {
-    if (conn != null) conn.close();
-  }
-
-  protected static String getProperty(String property, String description) {
-    String val = System.getProperty(property);
-    if (val == null) {
-      throw new RuntimeException("You must set the property " + property + " with " +
-          description);
-    }
-    LOG.debug(description + " is " + val);
-    return val;
-  }
-
-  protected static boolean testActive(String property, String description) {
-    String val = System.getProperty(property, "true");
-    LOG.debug(description + " is " + val);
-    return Boolean.valueOf(val);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
deleted file mode 100644
index 578621a..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
+++ /dev/null
@@ -1,201 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.odpi.specs.runtime.hive;
-
-import org.apache.commons.exec.CommandLine;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-import java.io.FileNotFoundException;
-import java.io.PrintWriter;
-import java.util.Map;
-
-public class TestBeeline {
-	
-	public static final Log LOG = LogFactory.getLog(TestBeeline.class.getName());
-	
-	private static final String URL = "odpi.test.hive.jdbc.url";
-	private static final String USER = "odpi.test.hive.jdbc.user";
-	private static final String PASSWD = "odpi.test.hive.jdbc.password";
-	
-	private static Map<String, String> results;
-	private static String beelineUrl; 
-	private static String beelineUser;
-	private static String beelinePasswd;
-	
-	//creating beeline base command with username and password as per inputs
-	private static CommandLine beelineBaseCommand = new CommandLine("beeline");
-
-	@BeforeClass
-	public static void initialSetup(){
-		TestBeeline.beelineUrl = System.getProperty(URL);
-		TestBeeline.beelineUser = System.getProperty(USER);
-		TestBeeline.beelinePasswd = System.getProperty(PASSWD);
-
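-		// the assembled base command is e.g. "beeline -u <url> [-n <user> [-p <passwd>]]",
-		// with the optional flags added only when the corresponding values are supplied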
-		if (beelineUser != null && !beelineUser.isEmpty() && beelinePasswd != null && !beelinePasswd.isEmpty())
-		{
-			beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd);
-		}
-		else if (beelineUser != null && !beelineUser.isEmpty())
-		{
-			beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser);
-		}
-		else {
-			beelineBaseCommand.addArgument("-u").addArgument(beelineUrl);
-		}
-		LOG.info("URL is " + beelineUrl); 
-		LOG.info("User is " + beelineUser);
-		LOG.info("Passwd is " + beelinePasswd); 
-		LOG.info("Passwd is null " + (beelinePasswd == null));
-	}
-
-	@Test
-	public void checkBeeline() {
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline -u FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-	}
-	
-	@Test
-	public void checkBeelineConnect(){
-		try(PrintWriter out = new PrintWriter("connect.url")){ out.println("!connect " + beelineUrl+" "+beelineUser+" "+beelinePasswd); out.println("!quit"); } 
-		catch (FileNotFoundException e1) {
-			e1.printStackTrace();
-		}
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -f connect.url",false));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline !connect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception") );  
-	}
-	
-	@Test
-	public void checkBeelineHelp(){
-		results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("--help"));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline --help FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("display this message" ) && consoleMsg.contains("usage: java org.apache.hive.cli.beeline.beeline") && !consoleMsg.contains("exception"));
-	}
-	
-	@Test
-	public void checkBeelineQueryExecFromCmdLine(){
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive;"));
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
-		}
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline -e FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
-	}
-	
-	@Test
-	public void checkBeelineQueryExecFromFile() throws FileNotFoundException{
-		
-		try(PrintWriter out = new PrintWriter("beeline-f1.sql")){ out.println("SHOW DATABASES;"); }
-		try(PrintWriter out = new PrintWriter("beeline-f2.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
-		try(PrintWriter out = new PrintWriter("beeline-f3.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
-		try(PrintWriter out = new PrintWriter("beeline-f4.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); }
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false));
-		
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f2.sql",false));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f3.sql",false));
-		}
-		
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false));
-
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline -f FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f4.sql",false));	
-	}
-	
-	@Test
-	public void checkBeelineInitFile() throws FileNotFoundException{
-
-		try(PrintWriter out = new PrintWriter("beeline-i1.sql")){ out.println("SHOW DATABASES;"); }
-		try(PrintWriter out = new PrintWriter("beeline-i2.sql")){ out.println("CREATE DATABASE odpi_runtime_beeline_init;"); }
-		try(PrintWriter out = new PrintWriter("beeline-i3.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); out.println("CREATE DATABASE odpi_runtime_beeline_init;"); }
-		try(PrintWriter out = new PrintWriter("beeline-i4.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); }
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false));
-	
-		if(!results.get("outputStream").contains("odpi_runtime_beeline_init")){
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i2.sql",false));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i3.sql",false));
-		}
-		
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline -i FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_beeline_init") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i4.sql",false));	
-	}
-	
-	@Test
-	public void checkBeelineHiveVar() throws FileNotFoundException{
-
-		try(PrintWriter out = new PrintWriter("beeline-hv1.sql")){ out.println("SHOW DATABASES;"); }
-		try(PrintWriter out = new PrintWriter("beeline-hv2.sql")){ out.println("CREATE DATABASE ${db};"); }
-		try(PrintWriter out = new PrintWriter("beeline-hv3.sql")){ out.println("DROP DATABASE ${db};"); out.println("CREATE DATABASE ${db};"); }
-		try(PrintWriter out = new PrintWriter("beeline-hv4.sql")){ out.println("DROP DATABASE ${db};"); }
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false));
-	
-		if(!results.get("outputStream").contains("odpi_runtime_beeline_hivevar")){
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv2.sql",false));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv3.sql",false));
-		}
-		
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline --hivevar FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_beeline_hivevar") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv4.sql",false));		 
-	}
-	
-	@Test
-	public void checkBeelineFastConnect(){
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--fastConnect=false"));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline --fastConnect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("set fastconnect to true to skip"));
-	}
-
-	@Test
-	public void checkBeelineVerbose(){
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--verbose=true"));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline --verbose FAILED." +results.get("outputStream"), true, consoleMsg.contains("issuing: !connect jdbc:hive2:") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-	}
-	
-	@Test
-	public void checkBeelineShowHeader(){
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--showHeader=false").addArgument("-e").addArgument("SHOW DATABASES;"));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline --showHeader FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("default")&&!consoleMsg.contains("database_name") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-	}
-
-	@AfterClass
-	public static void cleanup() throws FileNotFoundException {
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf beeline*.sql", false));
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf connect.url", false));
-	}
-}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
deleted file mode 100644
index 2b70909..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
+++ /dev/null
@@ -1,213 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.odpi.specs.runtime.hive;
-
-import java.io.FileNotFoundException;
-import java.io.PrintWriter;
-import java.util.Map;
-
-import org.apache.commons.exec.CommandLine;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.AfterClass;
-import org.junit.Assert;
-
-public class TestCLI {
-	
-	static Map<String, String> results;
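-	// --hiveconf value that points the metastore at a local embedded Derby
-	// database, so the CLI tests do not touch an externally configured metastore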
-	static String db = "javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=odpi_metastore_db;create=true";
-	
-	@BeforeClass
-	public static void setup(){
-		
-		results = HiveHelper.execCommand(new CommandLine("which").addArgument("hive"));
-		Assert.assertEquals("Hive is not in the current path.", 0, Integer.parseInt(results.get("exitValue")));
-	}
-	
-	@Test
-	public void help(){		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-H"));
-		//LOG.info(results.get("exitValue"));
-		Assert.assertEquals("Error in executing 'hive -H'", 2, Integer.parseInt(results.get("exitValue")));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--help"));
-		Assert.assertEquals("Error in executing 'hive --help'", 0, Integer.parseInt(results.get("exitValue")));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-U"));
-		Assert.assertEquals("Unrecognized option should exit 1.", 1, Integer.parseInt(results.get("exitValue")));
-	}
-	 
-	@Test
-	public void sqlFromCmdLine(){
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
-		}
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-	}
-	
-	@Test
-	public void sqlFromFiles() throws FileNotFoundException{
-		try(PrintWriter out = new PrintWriter("hive-f1.sql")){ out.println("SHOW DATABASES;"); }
-		try(PrintWriter out = new PrintWriter("hive-f2.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
-		try(PrintWriter out = new PrintWriter("hive-f3.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
-		try(PrintWriter out = new PrintWriter("hive-f4.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); }
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f1.sql").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f2.sql").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f3.sql").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
-		}
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f4.sql").addArgument("--hiveconf").addArgument(db));
-	}
-	
-	@Test
-	public void silent() {
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-S").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("-S option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--silent").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("--silent option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
-	}
-	
-	@Test
-	public void verbose(){
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-v").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("-v option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--verbose").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("--verbose option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));		
-	}
-	
-	@Test
-	public void initialization() throws FileNotFoundException{
-		try(PrintWriter out = new PrintWriter("hive-init1.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
-		try(PrintWriter out = new PrintWriter("hive-init2.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init1.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", 0, Integer.parseInt(results.get("exitValue")));
-			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", true, results.get("outputStream").contains("odpi_runtime_hive"));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init2.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
-			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", true, results.get("outputStream").contains("odpi_runtime_hive"));
-		}
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-	}
-	
-	@Test
-	public void database(){
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		}
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive_1234").addArgument("-e").addArgument("CREATE TABLE odpi ( MYID INT );").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("Non-existent database returned with wrong exit code: "+Integer.parseInt(results.get("exitValue")), 88, Integer.parseInt(results.get("exitValue")));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("CREATE TABLE odpi ( MYID INT );").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("DESCRIBE odpi").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("Failed to get expected column after creating odpi table using --database argument.", true, results.get("outputStream").contains("myid"));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("DROP TABLE odpi").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-	}
-	
-	@Test
-	public void hiveConf(){
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("The --hiveconf option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("The -hiveconf variant option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
-	}
-	
-	@Test
-	public void variableSubstitution() throws FileNotFoundException{
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		}
-		try(PrintWriter out = new PrintWriter("hive-define.sql")){ out.println("show ${A};"); out.println("quit;"); }
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive -d A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false));		
-		Assert.assertEquals("The hive -d A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
-		Assert.assertEquals("The hive -d A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
-		
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --define A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false));		
-		Assert.assertEquals("The hive --define A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
-		Assert.assertEquals("The hive --define A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-	}
-	
-	@Test
-	public void hiveVar() throws FileNotFoundException{
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		}
-		try(PrintWriter out = new PrintWriter("hive-var.sql")){ out.println("show ${A};"); out.println("quit;"); }
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hivevar A=DATABASES --hiveconf '"+db+"' < hive-var.sql", false));		
-		Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
-		Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
-		
-		try(PrintWriter out = new PrintWriter("hiveconf-var.sql")){ out.println("show ${hiveconf:A};"); out.println("quit;"); }
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hiveconf A=DATABASES --hiveconf '"+db+"' < hiveconf-var.sql", false));		
-		Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
-		Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-	}
-	
-	@AfterClass
-	public static void cleanup(){
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-f*.sql", false));
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-init*.sql", false));
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-define.sql", false));
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-var.sql", false));
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hiveconf-var.sql", false));
-	}
-	 
-}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
deleted file mode 100644
index 0ea49ce..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
+++ /dev/null
@@ -1,158 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.odpi.specs.runtime.hive;
-
-import org.apache.commons.exec.CommandLine;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.IMetaStoreClient;
-import org.apache.hadoop.hive.metastore.TableType;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.SerDeInfo;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
-import org.apache.hive.hcatalog.data.schema.HCatSchema;
-import org.apache.thrift.TException;
-import org.junit.Assert;
-import org.junit.Assume;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Random;
-
-
-public class TestHCatalog {
-  private static final String JOBJAR = "odpi.test.hive.hcat.job.jar";
-  private static final String HCATCORE = "odpi.test.hive.hcat.core.jar";
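-  // Both properties are supplied by the build: JOBJAR names the jar containing
-  // HCatalogMR, HCATCORE the hcatalog core jar added to HADOOP_CLASSPATH
-  // (e.g. -Dodpi.test.hive.hcat.job.jar=/path/to/job.jar; path illustrative).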
-
-  private static final Log LOG = LogFactory.getLog(TestHCatalog.class.getName());
-
-  private static IMetaStoreClient client = null;
-  private static HiveConf conf;
-  private static HCatSchema inputSchema;
-  private static HCatSchema outputSchema;
-
-  private Random rand;
-
-  @BeforeClass
-  public static void connect() throws MetaException {
-    if (JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog ")) {
-      String hiveConfDir = JdbcConnector.getProperty(JdbcConnector.HIVE_CONF_DIR,
-          "Hive conf directory ");
-      String hadoopConfDir = JdbcConnector.getProperty(JdbcConnector.HADOOP_CONF_DIR,
-          "Hadoop conf directory ");
-      conf = new HiveConf();
-      String fileSep = System.getProperty("file.separator");
-      conf.addResource(new Path(hadoopConfDir + fileSep + "core-site.xml"));
-      conf.addResource(new Path(hadoopConfDir + fileSep + "hdfs-site.xml"));
-      conf.addResource(new Path(hadoopConfDir + fileSep + "yarn-site.xml"));
-      conf.addResource(new Path(hadoopConfDir + fileSep + "mapred-site.xml"));
-      conf.addResource(new Path(hiveConfDir + fileSep + "hive-site.xml"));
-      client = new HiveMetaStoreClient(conf);
-
-    }
-  }
-
-  @Before
-  public void checkIfActive() {
-    Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog "));
-    rand = new Random();
-  }
-
-  @Test
-  public void hcatInputFormatOutputFormat() throws TException, IOException, ClassNotFoundException,
-      InterruptedException, URISyntaxException {
-    // Create a table to write to
-    final String inputTable = "odpi_hcat_input_table_" + rand.nextInt(Integer.MAX_VALUE);
-    SerDeInfo serde = new SerDeInfo("default_serde",
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
-    FieldSchema schema = new FieldSchema("line", "string", "");
-    inputSchema = new HCatSchema(Collections.singletonList(new HCatFieldSchema(schema.getName(),
-        HCatFieldSchema.Type.STRING, schema.getComment())));
-    StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(schema), null,
-        "org.apache.hadoop.mapred.TextInputFormat",
-        "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null,
-        new HashMap<String, String>());
-    Table table = new Table(inputTable, "default", "me", 0, 0, 0, sd, null,
-        new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
-    client.createTable(table);
-
-    final String outputTable = "odpi_hcat_output_table_" + rand.nextInt(Integer.MAX_VALUE);
-    sd = new StorageDescriptor(Arrays.asList(
-          new FieldSchema("word", "string", ""),
-          new FieldSchema("count", "int", "")),
-        null, "org.apache.hadoop.mapred.TextInputFormat",
-        "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null,
-        new HashMap<String, String>());
-    table = new Table(outputTable, "default", "me", 0, 0, 0, sd, null,
-        new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
-    client.createTable(table);
-    outputSchema = new HCatSchema(Arrays.asList(
-        new HCatFieldSchema("word", HCatFieldSchema.Type.STRING, ""),
-        new HCatFieldSchema("count", HCatFieldSchema.Type.INT, "")));
-
-    // LATER Could I use HCatWriter here and the reader to read it?
-    // Write some stuff into a file in the location of the table
-    table = client.getTable("default", inputTable);
-    String inputFile = table.getSd().getLocation() + "/input";
-    Path inputPath = new Path(inputFile);
-    FileSystem fs = FileSystem.get(conf);
-    FSDataOutputStream out = fs.create(inputPath);
-    out.writeChars("Mary had a little lamb\n");
-    out.writeChars("its fleece was white as snow\n");
-    out.writeChars("and everywhere that Mary went\n");
-    out.writeChars("the lamb was sure to go\n");
-    out.close();
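-
-    // Re: the LATER note above -- a rough sketch of the HCatWriter alternative, using the
-    // org.apache.hive.hcatalog.data.transfer API (not what this test does; signatures are
-    // from memory and worth verifying against the HCatalog version in use):
-    //   WriteEntity we = new WriteEntity.Builder()
-    //       .withDatabase("default").withTable(inputTable).build();
-    //   HCatWriter master = DataTransferFactory.getHCatWriter(we, new HashMap<String, String>());
-    //   WriterContext ctx = master.prepareWrite();
-    //   DataTransferFactory.getHCatWriter(ctx).write(records); // records: Iterator<HCatRecord>
-    //   master.commit(ctx);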
-
-    Map<String, String> env = new HashMap<>();
-    env.put("HADOOP_CLASSPATH", System.getProperty(HCATCORE, ""));
-    Map<String, String> results = HiveHelper.execCommand(new CommandLine("hive")
-        .addArgument("--service")
-        .addArgument("jar")
-        .addArgument(System.getProperty(JOBJAR))
-        .addArgument(HCatalogMR.class.getName())
-        .addArgument("-it")
-        .addArgument(inputTable)
-        .addArgument("-ot")
-        .addArgument(outputTable)
-        .addArgument("-is")
-        .addArgument(inputSchema.getSchemaAsTypeString())
-        .addArgument("-os")
-        .addArgument(outputSchema.getSchemaAsTypeString()), env);
-    LOG.info(results.toString());
-    Assert.assertEquals("HCat job failed", 0, Integer.parseInt(results.get("exitValue")));
-
-    client.dropTable("default", inputTable);
-    client.dropTable("default", outputTable);
-  }
-
-}
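
For reference, the HCatalogMR driver that the test above launches via "hive --service
jar" is expected to wire its job through HCatInputFormat/HCatOutputFormat. Below is a
minimal sketch of that wiring; the class name and table names are placeholders, the
mapper/reducer setup is elided, and none of it is taken from this patch:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.mapreduce.Job;
  import org.apache.hive.hcatalog.data.schema.HCatSchema;
  import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
  import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
  import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;

  public class HCatalogMRSketch {
    public static void main(String[] args) throws Exception {
      Job job = Job.getInstance(new Configuration(), "odpi-hcat-sketch");
      job.setJarByClass(HCatalogMRSketch.class);

      // Read rows of the metastore-registered input table.
      HCatInputFormat.setInput(job, "default", "odpi_hcat_input_table");
      job.setInputFormatClass(HCatInputFormat.class);

      // Write to the output table; null partition values for an unpartitioned table.
      HCatOutputFormat.setOutput(job,
          OutputJobInfo.create("default", "odpi_hcat_output_table", null));
      HCatSchema outSchema = HCatOutputFormat.getTableSchema(job.getConfiguration());
      HCatOutputFormat.setSchema(job, outSchema);
      job.setOutputFormatClass(HCatOutputFormat.class);

      // A word-count Mapper/Reducer pair emitting HCatRecords would be set here.
      System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
  }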

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
deleted file mode 100644
index 154fd9c..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
+++ /dev/null
@@ -1,545 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.odpi.specs.runtime.hive;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.junit.Test;
-
-import java.sql.Connection;
-import java.sql.DatabaseMetaData;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.ResultSetMetaData;
-import java.sql.SQLException;
-import java.sql.SQLWarning;
-import java.sql.Statement;
-import java.sql.Types;
-
-public class TestJdbc extends JdbcConnector {
-  private static final Log LOG = LogFactory.getLog(TestJdbc.class.getName());
-
-  /**
-   * Test simple non-statement-related Connection calls.  setSchema is tested elsewhere because
-   * there's extra work to do for that one.  Similarly with getMetaData.
-   * @throws SQLException
-   */
-  @Test
-  public void nonStatementCalls() throws SQLException {
-    conn.clearWarnings();
-
-    boolean isAutoCommit = conn.getAutoCommit();
-    LOG.debug("Auto commit is " + isAutoCommit);
-
-    String catalog = conn.getCatalog();
-    LOG.debug("Catalog is " + catalog);
-
-    String schema = conn.getSchema();
-    LOG.debug("Schema is " + schema);
-
-    int txnIsolation = conn.getTransactionIsolation();
-    LOG.debug("Transaction Isolation is " + txnIsolation);
-
-    SQLWarning warning = conn.getWarnings();
-    while (warning != null) {
-      LOG.debug("Found a warning: " + warning.getMessage());
-      warning = warning.getNextWarning();
-    }
-
-    boolean closed = conn.isClosed();
-    LOG.debug("Is closed? " + closed);
-
-    boolean readOnly = conn.isReadOnly();
-    LOG.debug("Is read only?" + readOnly);
-
-    // Hive doesn't support catalogs, so setting this to any value should be fine.  If
-    // non-Hive systems run this test, setting an invalid catalog name may cause issues,
-    // so we may need to make this value configurable.
-    conn.setCatalog("fred");
-  }
-
-  /**
-   * Test simple DatabaseMetaData calls.  getColumns is tested elsewhere, as we need to call
-   * that on a valid table.  Same with getFunctions.
-   * @throws SQLException
-   */
-  @Test
-  public void databaseMetaDataCalls() throws SQLException {
-    DatabaseMetaData md = conn.getMetaData();
-
-    boolean boolrc = md.allTablesAreSelectable();
-    LOG.debug("All tables are selectable? " + boolrc);
-
-    String strrc = md.getCatalogSeparator();
-    LOG.debug("Catalog separator " + strrc);
-
-    strrc = md.getCatalogTerm();
-    LOG.debug("Catalog term " + strrc);
-
-    ResultSet rs = md.getCatalogs();
-    while (rs.next()) {
-      strrc = rs.getString(1);
-      LOG.debug("Found catalog " + strrc);
-    }
-
-    Connection c = md.getConnection();
-
-    int intrc = md.getDatabaseMajorVersion();
-    LOG.debug("DB major version is " + intrc);
-
-    intrc = md.getDatabaseMinorVersion();
-    LOG.debug("DB minor version is " + intrc);
-
-    strrc = md.getDatabaseProductName();
-    LOG.debug("DB product name is " + strrc);
-
-    strrc = md.getDatabaseProductVersion();
-    LOG.debug("DB product version is " + strrc);
-
-    intrc = md.getDefaultTransactionIsolation();
-    LOG.debug("Default transaction isolation is " + intrc);
-
-    intrc = md.getDriverMajorVersion();
-    LOG.debug("Driver major version is " + intrc);
-
-    intrc = md.getDriverMinorVersion();
-    LOG.debug("Driver minor version is " + intrc);
-
-    strrc = md.getDriverName();
-    LOG.debug("Driver name is " + strrc);
-
-    strrc = md.getDriverVersion();
-    LOG.debug("Driver version is " + strrc);
-
-    strrc = md.getExtraNameCharacters();
-    LOG.debug("Extra name characters is " + strrc);
-
-    strrc = md.getIdentifierQuoteString();
-    LOG.debug("Identifier quote string is " + strrc);
-
-    // In Hive 1.2 this always returns an empty RS
-    rs = md.getImportedKeys("a", "b", "d");
-
-    // In Hive 1.2 this always returns an empty RS
-    rs = md.getIndexInfo("a", "b", "d", true, true);
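-    // (If stricter verification were wanted, the emptiness could be asserted directly,
-    // e.g. with org.junit.Assert imported: Assert.assertFalse(rs.next());)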
-
-    intrc = md.getJDBCMajorVersion();
-    LOG.debug("JDBC major version is " + intrc);
-
-    intrc = md.getJDBCMinorVersion();
-    LOG.debug("JDBC minor version is " + intrc);
-
-    intrc = md.getMaxColumnNameLength();
-    LOG.debug("Maximum column name length is " + intrc);
-
-    strrc = md.getNumericFunctions();
-    LOG.debug("Numeric functions are " + strrc);
-
-    // In Hive 1.2 this always returns an empty RS
-    rs = md.getPrimaryKeys("a", "b", "d");
-
-    // In Hive 1.2 this always returns an empty RS
-    rs = md.getProcedureColumns("a", "b", "d", "e");
-
-    strrc = md.getProcedureTerm();
-    LOG.debug("Procedures are called " + strrc);
-
-    // In Hive 1.2 this always returns an empty RS
-    rs = md.getProcedures("a", "b", "d");
-
-    strrc = md.getSchemaTerm();
-    LOG.debug("Schemas are called " + strrc);
-
-    rs = md.getSchemas();
-    while (rs.next()) {
-      strrc = rs.getString(1);
-      LOG.debug("Found schema " + strrc);
-    }
-
-    strrc = md.getSearchStringEscape();
-    LOG.debug("Search string escape is " + strrc);
-
-    strrc = md.getStringFunctions();
-    LOG.debug("String functions are " + strrc);
-
-    strrc = md.getSystemFunctions();
-    LOG.debug("System functions are " + strrc);
-
-    rs = md.getTableTypes();
-    while (rs.next()) {
-      strrc = rs.getString(1);
-      LOG.debug("Found table type " + strrc);
-    }
-
-    strrc = md.getTimeDateFunctions();
-    LOG.debug("Time/date functions are " + strrc);
-
-    rs = md.getTypeInfo();
-    while (rs.next()) {
-      strrc = rs.getString(1);
-      LOG.debug("Found type " + strrc);
-    }
-
-    // In Hive 1.2 this always returns an empty RS
-    rs = md.getUDTs("a", "b", "d", null);
-
-    boolrc = md.supportsAlterTableWithAddColumn();
-    LOG.debug("Supports alter table with add column? " + boolrc);
-
-    boolrc = md.supportsAlterTableWithDropColumn();
-    LOG.debug("Supports alter table with drop column? " + boolrc);
-
-    boolrc = md.supportsBatchUpdates();
-    LOG.debug("Supports batch updates? " + boolrc);
-
-    boolrc = md.supportsCatalogsInDataManipulation();
-    LOG.debug("Supports catalogs in data manipulation? " + boolrc);
-
-    boolrc = md.supportsCatalogsInIndexDefinitions();
-    LOG.debug("Supports catalogs in index definition? " + boolrc);
-
-    boolrc = md.supportsCatalogsInPrivilegeDefinitions();
-    LOG.debug("Supports catalogs in privilege definition? " + boolrc);
-
-    boolrc = md.supportsCatalogsInProcedureCalls();
-    LOG.debug("Supports catalogs in procedure calls? " + boolrc);
-
-    boolrc = md.supportsCatalogsInTableDefinitions();
-    LOG.debug("Supports catalogs in table definition? " + boolrc);
-
-    boolrc = md.supportsColumnAliasing();
-    LOG.debug("Supports column aliasing? " + boolrc);
-
-    boolrc = md.supportsFullOuterJoins();
-    LOG.debug("Supports full outer joins? " + boolrc);
-
-    boolrc = md.supportsGroupBy();
-    LOG.debug("Supports group by? " + boolrc);
-
-    boolrc = md.supportsLimitedOuterJoins();
-    LOG.debug("Supports limited outer joins? " + boolrc);
-
-    boolrc = md.supportsMultipleResultSets();
-    LOG.debug("Supports limited outer joins? " + boolrc);
-
-    boolrc = md.supportsNonNullableColumns();
-    LOG.debug("Supports non-nullable columns? " + boolrc);
-
-    boolrc = md.supportsOuterJoins();
-    LOG.debug("Supports outer joins? " + boolrc);
-
-    boolrc = md.supportsPositionedDelete();
-    LOG.debug("Supports positioned delete? " + boolrc);
-
-    boolrc = md.supportsPositionedUpdate();
-    LOG.debug("Supports positioned update? " + boolrc);
-
-    boolrc = md.supportsResultSetHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
-    LOG.debug("Supports result set holdability? " + boolrc);
-
-    boolrc = md.supportsResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE);
-    LOG.debug("Supports result set type? " + boolrc);
-
-    boolrc = md.supportsSavepoints();
-    LOG.debug("Supports savepoints? " + boolrc);
-
-    boolrc = md.supportsSchemasInDataManipulation();
-    LOG.debug("Supports schemas in data manipulation? " + boolrc);
-
-    boolrc = md.supportsSchemasInIndexDefinitions();
-    LOG.debug("Supports schemas in index definitions? " + boolrc);
-
-    boolrc = md.supportsSchemasInPrivilegeDefinitions();
-    LOG.debug("Supports schemas in privilege definitions? " + boolrc);
-
-    boolrc = md.supportsSchemasInProcedureCalls();
-    LOG.debug("Supports schemas in procedure calls? " + boolrc);
-
-    boolrc = md.supportsSchemasInTableDefinitions();
-    LOG.debug("Supports schemas in table definitions? " + boolrc);
-
-    boolrc = md.supportsSelectForUpdate();
-    LOG.debug("Supports select for update? " + boolrc);
-
-    boolrc = md.supportsStoredProcedures();
-    LOG.debug("Supports stored procedures? " + boolrc);
-
-    boolrc = md.supportsTransactions();
-    LOG.debug("Supports transactions? " + boolrc);
-
-    boolrc = md.supportsUnion();
-    LOG.debug("Supports union? " + boolrc);
-
-    boolrc = md.supportsUnionAll();
-    LOG.debug("Supports union all? " + boolrc);
-
-  }
-
-  @Test
-  public void setSchema() throws SQLException {
-    try (Statement stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
-        ResultSet.CONCUR_READ_ONLY)) {
-
-      final String dbName = "odpi_jdbc_test_db";
-
-      final String tableName = "odpi_jdbc_test_table";
-      stmt.execute("drop table if exists " + tableName);
-
-      stmt.execute("drop database if exists " + dbName + " cascade");
-      stmt.execute("create database " + dbName);
-
-      conn.setSchema(dbName);
-
-      DatabaseMetaData md = conn.getMetaData();
-
-      ResultSet rs = md.getSchemas(null, dbName);
-
-      while (rs.next()) {
-        String schemaName = rs.getString(1);
-        LOG.debug("Schema name is " + schemaName);
-      }
-
-      stmt.execute("create table " + tableName + " (i int, s varchar(32))");
-
-      rs = md.getTables(null, dbName, tableName, null);
-      while (rs.next()) {
-        String tName = rs.getString(3);
-        LOG.debug("Schema name is " + tName);
-      }
-
-      rs = md.getColumns(null, dbName, tableName, "i");
-      while (rs.next()) {
-        String colName = rs.getString(4);
-        LOG.debug("Schema name is " + colName);
-      }
-
-      rs = md.getFunctions(null, dbName, "foo");
-      while (rs.next()) {
-        String funcName = rs.getString(3);
-        LOG.debug("Schema name is " + funcName);
-      }
-    }
-  }
-
-  @Test
-  public void statement() throws SQLException {
-    try (Statement stmt = conn.createStatement()) {
-      stmt.cancel();
-    }
-
-    try (Statement stmt = conn.createStatement()) {
-      stmt.clearWarnings();
-
-      final String tableName = "odpi_jdbc_statement_test_table";
-
-      stmt.execute("drop table if exists " + tableName);
-      stmt.execute("create table " + tableName + " (a int, b varchar(32))");
-
-      stmt.executeUpdate("insert into " + tableName + " values (1, 'abc'), (2, 'def')");
-
-      int intrc = stmt.getUpdateCount();
-      LOG.debug("Update count is " + intrc);
-
-      ResultSet rs = stmt.executeQuery("select * from " + tableName);
-      while (rs.next()) {
-        LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2));
-      }
-
-      Connection localConn = stmt.getConnection();
-
-      intrc = stmt.getFetchDirection();
-      LOG.debug("Fetch direction is " + intrc);
-
-      intrc = stmt.getFetchSize();
-      LOG.debug("Fetch size is " + intrc);
-
-      intrc = stmt.getMaxRows();
-      LOG.debug("max rows is " + intrc);
-
-      boolean boolrc = stmt.getMoreResults();
-      LOG.debug("more results is " + boolrc);
-
-      intrc = stmt.getQueryTimeout();
-      LOG.debug("query timeout is " + intrc);
-
-      stmt.execute("select * from " + tableName);
-      rs = stmt.getResultSet();
-      while (rs.next()) {
-        LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2));
-      }
-
-      intrc = stmt.getResultSetType();
-      LOG.debug("result set type is " + intrc);
-
-      SQLWarning warning = stmt.getWarnings();
-      while (warning != null) {
-        LOG.debug("Found a warning: " + warning.getMessage());
-        warning = warning.getNextWarning();
-      }
-
-      boolrc = stmt.isClosed();
-      LOG.debug("is closed " + boolrc);
-
-      boolrc = stmt.isCloseOnCompletion();
-      LOG.debug("is close on completion " + boolrc);
-
-      boolrc = stmt.isPoolable();
-      LOG.debug("is poolable " + boolrc);
-
-      stmt.setFetchDirection(ResultSet.FETCH_FORWARD);
-      stmt.setFetchSize(500);
-      stmt.setMaxRows(500);
-    }
-  }
-
-  @Test
-  public void preparedStmtAndResultSet() throws SQLException {
-    final String tableName = "odpi_jdbc_psars_test_table";
-    try (Statement stmt = conn.createStatement()) {
-      stmt.execute("drop table if exists " + tableName);
-      stmt.execute("create table " + tableName + " (bo boolean, ti tinyint, db double, fl float, " +
-          "i int, lo bigint, sh smallint, st varchar(32))");
-    }
-
-    // NOTE Hive 1.2 theoretically supports binary, Date, and Timestamp over JDBC, but I get
-    // errors when I try to put them in the query.
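-    // For example (hypothetical, reconstructing the kind of call that failed):
-    //   ps.setDate(9, java.sql.Date.valueOf("2017-03-23"));  // errors in Hive 1.2's driver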
-    try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName +
-        " values (?, ?, ?, ?, ?, ?, ?, ?)")) {
-      ps.setBoolean(1, true);
-      ps.setByte(2, (byte)1);
-      ps.setDouble(3, 3.141592654);
-      ps.setFloat(4, 3.14f);
-      ps.setInt(5, 3);
-      ps.setLong(6, 10L);
-      ps.setShort(7, (short)20);
-      ps.setString(8, "abc");
-      ps.executeUpdate();
-    }
-
-    try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName + " (i, st) " +
-        "values(?, ?)", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) {
-      ps.setNull(1, Types.INTEGER);
-      ps.setObject(2, "mary had a little lamb");
-      ps.executeUpdate();
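-      // Bind a second row, wipe the bindings with clearParameters, then bind the same
-      // values again before executing; this exercises clearParameters between two binds.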
-      ps.setNull(1, Types.INTEGER, null);
-      ps.setString(2, "its fleece was white as snow");
-      ps.clearParameters();
-      ps.setNull(1, Types.INTEGER, null);
-      ps.setString(2, "its fleece was white as snow");
-      ps.execute();
-
-    }
-
-    try (Statement stmt = conn.createStatement()) {
-
-      ResultSet rs = stmt.executeQuery("select * from " + tableName);
-
-      ResultSetMetaData md = rs.getMetaData();
-
-      int colCnt = md.getColumnCount();
-      LOG.debug("Column count is " + colCnt);
-
-      for (int i = 1; i <= colCnt; i++) {
-        LOG.debug("Looking at column " + i);
-        String strrc = md.getColumnClassName(i);
-        LOG.debug("Column class name is " + strrc);
-
-        int intrc = md.getColumnDisplaySize(i);
-        LOG.debug("Column display size is " + intrc);
-
-        strrc = md.getColumnLabel(i);
-        LOG.debug("Column label is " + strrc);
-
-        strrc = md.getColumnName(i);
-        LOG.debug("Column name is " + strrc);
-
-        intrc = md.getColumnType(i);
-        LOG.debug("Column type is " + intrc);
-
-        strrc = md.getColumnTypeName(i);
-        LOG.debug("Column type name is " + strrc);
-
-        intrc = md.getPrecision(i);
-        LOG.debug("Precision is " + intrc);
-
-        intrc = md.getScale(i);
-        LOG.debug("Scale is " + intrc);
-
-        boolean boolrc = md.isAutoIncrement(i);
-        LOG.debug("Is auto increment? " + boolrc);
-
-        boolrc = md.isCaseSensitive(i);
-        LOG.debug("Is case sensitive? " + boolrc);
-
-        boolrc = md.isCurrency(i);
-        LOG.debug("Is currency? " + boolrc);
-
-        intrc = md.isNullable(i);
-        LOG.debug("Is nullable? " + intrc);
-
-        boolrc = md.isReadOnly(i);
-        LOG.debug("Is read only? " + boolrc);
-
-      }
-
-      while (rs.next()) {
-        LOG.debug("bo = " + rs.getBoolean(1));
-        LOG.debug("bo = " + rs.getBoolean("bo"));
-        LOG.debug("ti = " + rs.getByte(2));
-        LOG.debug("ti = " + rs.getByte("ti"));
-        LOG.debug("db = " + rs.getDouble(3));
-        LOG.debug("db = " + rs.getDouble("db"));
-        LOG.debug("fl = " + rs.getFloat(4));
-        LOG.debug("fl = " + rs.getFloat("fl"));
-        LOG.debug("i = " + rs.getInt(5));
-        LOG.debug("i = " + rs.getInt("i"));
-        LOG.debug("lo = " + rs.getLong(6));
-        LOG.debug("lo = " + rs.getLong("lo"));
-        LOG.debug("sh = " + rs.getShort(7));
-        LOG.debug("sh = " + rs.getShort("sh"));
-        LOG.debug("st = " + rs.getString(8));
-        LOG.debug("st = " + rs.getString("st"));
-        LOG.debug("tm = " + rs.getObject(8));
-        LOG.debug("tm = " + rs.getObject("st"));
-        LOG.debug("tm was null " + rs.wasNull());
-      }
-      LOG.debug("bo is column " + rs.findColumn("bo"));
-
-      int intrc = rs.getConcurrency();
-      LOG.debug("concurrency " + intrc);
-
-      intrc = rs.getFetchDirection();
-      LOG.debug("fetch direction " + intrc);
-
-      intrc = rs.getType();
-      LOG.debug("type " + intrc);
-
-      Statement copy = rs.getStatement();
-
-      SQLWarning warning = rs.getWarnings();
-      while (warning != null) {
-        LOG.debug("Found a warning: " + warning.getMessage());
-        warning = warning.getNextWarning();
-      }
-      rs.clearWarnings();
-    }
-  }
-}
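
The conn used throughout these tests comes from the JdbcConnector base class, which is
not part of this hunk. A minimal sketch of how such a base class typically obtains a
HiveServer2 connection (the URL, credentials, and property name are assumptions, not
the suite's actual configuration):

  import java.sql.Connection;
  import java.sql.DriverManager;

  public class JdbcConnectorSketch {
    protected static Connection conn;

    public static void connect() throws Exception {
      // Hive's JDBC driver; explicit loading is optional on modern JDKs.
      Class.forName("org.apache.hive.jdbc.HiveDriver");
      // Hypothetical property name; the real suite reads its own configuration.
      String url = System.getProperty("hive.jdbc.url",
          "jdbc:hive2://localhost:10000/default");
      conn = DriverManager.getConnection(url, "hive", "");
    }

    public static void disconnect() throws Exception {
      if (conn != null) conn.close();
    }
  }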