Posted to commits@bigtop.apache.org by rv...@apache.org on 2017/03/23 17:28:19 UTC
[48/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests
http://git-wip-us.apache.org/repos/asf/bigtop/blob/a05d3813/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
index c55bb92..2341e9b 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
@@ -28,186 +28,209 @@ import org.junit.AfterClass;
import org.junit.Assert;
public class TestCLI {
-
- static Map<String, String> results;
- static String db = "javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=bigtop_metastore_db;create=true";
-
- @BeforeClass
- public static void setup(){
-
- results = HiveHelper.execCommand(new CommandLine("which").addArgument("hive"));
- Assert.assertEquals("Hive is not in the current path.", 0, Integer.parseInt(results.get("exitValue")));
- }
-
- @Test
- public void help(){
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-H"));
- //LOG.info(results.get("exitValue"));
- Assert.assertEquals("Error in executing 'hive -H'", 2, Integer.parseInt(results.get("exitValue")));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--help"));
- Assert.assertEquals("Error in executing 'hive --help'", 0, Integer.parseInt(results.get("exitValue")));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-U"));
- Assert.assertEquals("Unrecognized option should exit 1.", 1, Integer.parseInt(results.get("exitValue")));
- }
-
- @Test
- public void sqlFromCmdLine(){
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
- if(!results.get("outputStream").contains("bigtop_runtime_hive")){
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
- }else{
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
- }
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }
-
- @Test
- public void sqlFromFiles() throws FileNotFoundException{
- try(PrintWriter out = new PrintWriter("hive-f1.sql")){ out.println("SHOW DATABASES;"); }
- try(PrintWriter out = new PrintWriter("hive-f2.sql")){ out.println("CREATE DATABASE bigtop_runtime_hive;"); }
- try(PrintWriter out = new PrintWriter("hive-f3.sql")){ out.println("DROP DATABASE bigtop_runtime_hive;"); out.println("CREATE DATABASE bigtop_runtime_hive;"); }
- try(PrintWriter out = new PrintWriter("hive-f4.sql")){ out.println("DROP DATABASE bigtop_runtime_hive;"); }
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f1.sql").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
- if(!results.get("outputStream").contains("bigtop_runtime_hive")){
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f2.sql").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
- }else{
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f3.sql").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
- }
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f4.sql").addArgument("--hiveconf").addArgument(db));
- }
-
- @Test
- public void silent() {
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-S").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("-S option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--silent").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("--silent option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
- }
-
- @Test
- public void verbose(){
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-v").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("-v option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--verbose").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("--verbose option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));
- }
-
- @Test
- public void initialization() throws FileNotFoundException{
- try(PrintWriter out = new PrintWriter("hive-init1.sql")){ out.println("CREATE DATABASE bigtop_runtime_hive;"); }
- try(PrintWriter out = new PrintWriter("hive-init2.sql")){ out.println("DROP DATABASE bigtop_runtime_hive;"); out.println("CREATE DATABASE bigtop_runtime_hive;"); }
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
- if(!results.get("outputStream").contains("bigtop_runtime_hive")){
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init1.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", 0, Integer.parseInt(results.get("exitValue")));
- Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
- }else{
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init2.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
- Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
- }
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }
-
- @Test
- public void database(){
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
- if(!results.get("outputStream").contains("bigtop_runtime_hive")){
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }else{
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive_1234").addArgument("-e").addArgument("CREATE TABLE bigtop ( MYID INT );").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("Non-existent database returned with wrong exit code: "+Integer.parseInt(results.get("exitValue")), 88, Integer.parseInt(results.get("exitValue")));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("CREATE TABLE bigtop ( MYID INT );").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("DESCRIBE bigtop").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("Failed to get expected column after creating bigtop table using --database argument.", true, results.get("outputStream").contains("myid"));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("DROP TABLE bigtop").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }
-
- @Test
- public void hiveConf(){
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("The --hiveconf option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
- Assert.assertEquals("The -hiveconf variant option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
- }
-
- @Test
- public void variableSubsitution() throws FileNotFoundException{
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
- if(!results.get("outputStream").contains("bigtop_runtime_hive")){
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }else{
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }
- try(PrintWriter out = new PrintWriter("hive-define.sql")){ out.println("show ${A};"); out.println("quit;"); }
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive -d A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false));
- Assert.assertEquals("The hive -d A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
- Assert.assertEquals("The hive -d A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
-
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --define A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false));
- Assert.assertEquals("The hive --define A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
- Assert.assertEquals("The hive --define A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }
-
- @Test
- public void hiveVar() throws FileNotFoundException{
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
- if(!results.get("outputStream").contains("bigtop_runtime_hive")){
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }else{
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }
- try(PrintWriter out = new PrintWriter("hive-var.sql")){ out.println("show ${A};"); out.println("quit;"); }
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hivevar A=DATABASES --hiveconf '"+db+"' < hive-var.sql", false));
- Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
- Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
-
- try(PrintWriter out = new PrintWriter("hiveconf-var.sql")){ out.println("show ${hiveconf:A};"); out.println("quit;"); }
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hiveconf A=DATABASES --hiveconf '"+db+"' < hiveconf-var.sql", false));
- Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
- Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
-
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- }
-
- @AfterClass
- public static void cleanup(){
- results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-f*.sql", false));
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-init*.sql", false));
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-define.sql", false));
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-var.sql", false));
- results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hiveconf-var.sql", false));
- }
-
+
+ static Map<String, String> results;
+ static String db = "javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=bigtop_metastore_db;create=true";
+
+ @BeforeClass
+ public static void setup() {
+
+ results = HiveHelper.execCommand(new CommandLine("which").addArgument("hive"));
+ Assert.assertEquals("Hive is not in the current path.", 0, Integer.parseInt(results.get("exitValue")));
+ }
+
+ @Test
+ public void help() {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-H"));
+ //LOG.info(results.get("exitValue"));
+ Assert.assertEquals("Error in executing 'hive -H'", 2, Integer.parseInt(results.get("exitValue")));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--help"));
+ Assert.assertEquals("Error in executing 'hive --help'", 0, Integer.parseInt(results.get("exitValue")));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-U"));
+ Assert.assertEquals("Unrecognized option should exit 1.", 1, Integer.parseInt(results.get("exitValue")));
+ }
+
+ @Test
+ public void sqlFromCmdLine() {
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
+ if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+ } else {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+ }
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ }
+
+ @Test
+ public void sqlFromFiles() throws FileNotFoundException {
+ try (PrintWriter out = new PrintWriter("hive-f1.sql")) {
+ out.println("SHOW DATABASES;");
+ }
+ try (PrintWriter out = new PrintWriter("hive-f2.sql")) {
+ out.println("CREATE DATABASE bigtop_runtime_hive;");
+ }
+ try (PrintWriter out = new PrintWriter("hive-f3.sql")) {
+ out.println("DROP DATABASE bigtop_runtime_hive;");
+ out.println("CREATE DATABASE bigtop_runtime_hive;");
+ }
+ try (PrintWriter out = new PrintWriter("hive-f4.sql")) {
+ out.println("DROP DATABASE bigtop_runtime_hive;");
+ }
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f1.sql").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
+ if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f2.sql").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+ } else {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f3.sql").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+ }
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f4.sql").addArgument("--hiveconf").addArgument(db));
+ }
+
+ @Test
+ public void silent() {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-S").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("-S option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--silent").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("--silent option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
+ }
+
+ @Test
+ public void verbose() {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-v").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("-v option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--verbose").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("--verbose option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));
+ }
+
+ @Test
+ public void initialization() throws FileNotFoundException {
+ try (PrintWriter out = new PrintWriter("hive-init1.sql")) {
+ out.println("CREATE DATABASE bigtop_runtime_hive;");
+ }
+ try (PrintWriter out = new PrintWriter("hive-init2.sql")) {
+ out.println("DROP DATABASE bigtop_runtime_hive;");
+ out.println("CREATE DATABASE bigtop_runtime_hive;");
+ }
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
+ if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init1.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", 0, Integer.parseInt(results.get("exitValue")));
+ Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
+ } else {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init2.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+ Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
+ }
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ }
+
+ @Test
+ public void database() {
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+ if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ } else {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ }
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive_1234").addArgument("-e").addArgument("CREATE TABLE bigtop ( MYID INT );").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("Non-existent database returned with wrong exit code: " + Integer.parseInt(results.get("exitValue")), 88, Integer.parseInt(results.get("exitValue")));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("CREATE TABLE bigtop ( MYID INT );").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("DESCRIBE bigtop").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("Failed to get expected column after creating bigtop table using --database argument.", true, results.get("outputStream").contains("myid"));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("DROP TABLE bigtop").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ }
+
+ @Test
+ public void hiveConf() {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("The --hiveconf option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+ Assert.assertEquals("The -hiveconf variant option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
+ }
+
+ @Test
+ public void variableSubstitution() throws FileNotFoundException {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+ if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ } else {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ }
+ try (PrintWriter out = new PrintWriter("hive-define.sql")) {
+ out.println("show ${A};");
+ out.println("quit;");
+ }
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive -d A=DATABASES --hiveconf '" + db + "' < hive-define.sql", false));
+ Assert.assertEquals("The hive -d A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+ Assert.assertEquals("The hive -d A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
+
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --define A=DATABASES --hiveconf '" + db + "' < hive-define.sql", false));
+ Assert.assertEquals("The hive --define A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+ Assert.assertEquals("The hive --define A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ }
+
+ @Test
+ public void hiveVar() throws FileNotFoundException {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+ if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ } else {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ }
+ try (PrintWriter out = new PrintWriter("hive-var.sql")) {
+ out.println("show ${A};");
+ out.println("quit;");
+ }
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hivevar A=DATABASES --hiveconf '" + db + "' < hive-var.sql", false));
+ Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+ Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
+
+ try (PrintWriter out = new PrintWriter("hiveconf-var.sql")) {
+ out.println("show ${hiveconf:A};");
+ out.println("quit;");
+ }
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hiveconf A=DATABASES --hiveconf '" + db + "' < hiveconf-var.sql", false));
+ Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+ Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
+
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ }
+
+ @AfterClass
+ public static void cleanup() {
+ results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-f*.sql", false));
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-init*.sql", false));
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-define.sql", false));
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-var.sql", false));
+ results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hiveconf-var.sql", false));
+ }
+
}
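HiveHelper.execCommand is not defined in this file, so as context for readers of this patch: the map keys "exitValue" and "outputStream" that TestCLI reads back suggest a thin wrapper over Apache Commons Exec, whose CommandLine class the tests already use. A minimal sketch of such a wrapper follows (the class body is an assumption, not the committed implementation):

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.util.HashMap;
    import java.util.Map;
    import org.apache.commons.exec.CommandLine;
    import org.apache.commons.exec.DefaultExecutor;
    import org.apache.commons.exec.ExecuteException;
    import org.apache.commons.exec.PumpStreamHandler;

    public class HiveHelperSketch {
        // Runs the command, captures stdout and stderr into one buffer, and
        // reports the exit code through the map instead of throwing, so tests
        // can assert on non-zero exits (e.g. 'hive -U' returning 1).
        public static Map<String, String> execCommand(CommandLine cmd) {
            ByteArrayOutputStream output = new ByteArrayOutputStream();
            DefaultExecutor executor = new DefaultExecutor();
            executor.setStreamHandler(new PumpStreamHandler(output, output));
            executor.setExitValues(null); // accept any exit code
            Map<String, String> results = new HashMap<>();
            try {
                results.put("exitValue", String.valueOf(executor.execute(cmd)));
            } catch (ExecuteException e) {
                results.put("exitValue", String.valueOf(e.getExitValue()));
            } catch (IOException e) {
                results.put("exitValue", "-1"); // command could not be launched
            }
            results.put("outputStream", output.toString());
            return results;
        }
    }

Returning the exit code in the map rather than throwing is what lets help() assert that an unrecognized option exits with 1 while setup() asserts that 'which hive' exits with 0.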
http://git-wip-us.apache.org/repos/asf/bigtop/blob/a05d3813/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
index 8bf7141..bb4287f 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
@@ -51,108 +51,108 @@ import java.util.Random;
public class TestHCatalog {
- private static final String JOBJAR = "bigtop.test.hive.hcat.job.jar";
- private static final String HCATCORE = "bigtop.test.hive.hcat.core.jar";
-
- private static final Log LOG = LogFactory.getLog(TestHCatalog.class.getName());
-
- private static IMetaStoreClient client = null;
- private static HiveConf conf;
- private static HCatSchema inputSchema;
- private static HCatSchema outputSchema;
-
- private Random rand;
-
- @BeforeClass
- public static void connect() throws MetaException {
- if (JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog ")) {
- String hiveConfDir = JdbcConnector.getProperty(JdbcConnector.HIVE_CONF_DIR,
- "Hive conf directory ");
- String hadoopConfDir = JdbcConnector.getProperty(JdbcConnector.HADOOP_CONF_DIR,
- "Hadoop conf directory ");
- conf = new HiveConf();
- String fileSep = System.getProperty("file.separator");
- conf.addResource(new Path(hadoopConfDir + fileSep + "core-site.xml"));
- conf.addResource(new Path(hadoopConfDir + fileSep + "hdfs-site.xml"));
- conf.addResource(new Path(hadoopConfDir + fileSep + "yarn-site.xml"));
- conf.addResource(new Path(hadoopConfDir + fileSep + "mapred-site.xml"));
- conf.addResource(new Path(hiveConfDir + fileSep + "hive-site.xml"));
- client = new HiveMetaStoreClient(conf);
+ private static final String JOBJAR = "bigtop.test.hive.hcat.job.jar";
+ private static final String HCATCORE = "bigtop.test.hive.hcat.core.jar";
+
+ private static final Log LOG = LogFactory.getLog(TestHCatalog.class.getName());
+
+ private static IMetaStoreClient client = null;
+ private static HiveConf conf;
+ private static HCatSchema inputSchema;
+ private static HCatSchema outputSchema;
+
+ private Random rand;
+
+ @BeforeClass
+ public static void connect() throws MetaException {
+ if (JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog ")) {
+ String hiveConfDir = JdbcConnector.getProperty(JdbcConnector.HIVE_CONF_DIR,
+ "Hive conf directory ");
+ String hadoopConfDir = JdbcConnector.getProperty(JdbcConnector.HADOOP_CONF_DIR,
+ "Hadoop conf directory ");
+ conf = new HiveConf();
+ String fileSep = System.getProperty("file.separator");
+ conf.addResource(new Path(hadoopConfDir + fileSep + "core-site.xml"));
+ conf.addResource(new Path(hadoopConfDir + fileSep + "hdfs-site.xml"));
+ conf.addResource(new Path(hadoopConfDir + fileSep + "yarn-site.xml"));
+ conf.addResource(new Path(hadoopConfDir + fileSep + "mapred-site.xml"));
+ conf.addResource(new Path(hiveConfDir + fileSep + "hive-site.xml"));
+ client = new HiveMetaStoreClient(conf);
+
+ }
+ }
+
+ @Before
+ public void checkIfActive() {
+ Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog "));
+ rand = new Random();
+ }
+
+ @Test
+ public void hcatInputFormatOutputFormat() throws TException, IOException, ClassNotFoundException,
+ InterruptedException, URISyntaxException {
+ // Create a table to write to
+ final String inputTable = "bigtop_hcat_input_table_" + rand.nextInt(Integer.MAX_VALUE);
+ SerDeInfo serde = new SerDeInfo("default_serde",
+ conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
+ FieldSchema schema = new FieldSchema("line", "string", "");
+ inputSchema = new HCatSchema(Collections.singletonList(new HCatFieldSchema(schema.getName(),
+ HCatFieldSchema.Type.STRING, schema.getComment())));
+ StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(schema), null,
+ "org.apache.hadoop.mapred.TextInputFormat",
+ "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null,
+ new HashMap<String, String>());
+ Table table = new Table(inputTable, "default", "me", 0, 0, 0, sd, null,
+ new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+ client.createTable(table);
+
+ final String outputTable = "bigtop_hcat_output_table_" + rand.nextInt(Integer.MAX_VALUE);
+ sd = new StorageDescriptor(Arrays.asList(
+ new FieldSchema("word", "string", ""),
+ new FieldSchema("count", "int", "")),
+ null, "org.apache.hadoop.mapred.TextInputFormat",
+ "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null,
+ new HashMap<String, String>());
+ table = new Table(outputTable, "default", "me", 0, 0, 0, sd, null,
+ new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+ client.createTable(table);
+ outputSchema = new HCatSchema(Arrays.asList(
+ new HCatFieldSchema("word", HCatFieldSchema.Type.STRING, ""),
+ new HCatFieldSchema("count", HCatFieldSchema.Type.INT, "")));
+
+ // LATER Could I use HCatWriter here and the reader to read it?
+ // Write some stuff into a file in the location of the table
+ table = client.getTable("default", inputTable);
+ String inputFile = table.getSd().getLocation() + "/input";
+ Path inputPath = new Path(inputFile);
+ FileSystem fs = FileSystem.get(conf);
+ FSDataOutputStream out = fs.create(inputPath);
+ out.writeChars("Mary had a little lamb\n");
+ out.writeChars("its fleece was white as snow\n");
+ out.writeChars("and everywhere that Mary went\n");
+ out.writeChars("the lamb was sure to go\n");
+ out.close();
+
+ Map<String, String> env = new HashMap<>();
+ env.put("HADOOP_CLASSPATH", System.getProperty(HCATCORE, ""));
+ Map<String, String> results = HiveHelper.execCommand(new CommandLine("hive")
+ .addArgument("--service")
+ .addArgument("jar")
+ .addArgument(System.getProperty(JOBJAR))
+ .addArgument(HCatalogMR.class.getName())
+ .addArgument("-it")
+ .addArgument(inputTable)
+ .addArgument("-ot")
+ .addArgument(outputTable)
+ .addArgument("-is")
+ .addArgument(inputSchema.getSchemaAsTypeString())
+ .addArgument("-os")
+ .addArgument(outputSchema.getSchemaAsTypeString()), env);
+ LOG.info(results.toString());
+ Assert.assertEquals("HCat job failed", 0, Integer.parseInt(results.get("exitValue")));
+
+ client.dropTable("default", inputTable);
+ client.dropTable("default", outputTable);
}
- }
-
- @Before
- public void checkIfActive() {
- Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog "));
- rand = new Random();
- }
-
- @Test
- public void hcatInputFormatOutputFormat() throws TException, IOException, ClassNotFoundException,
- InterruptedException, URISyntaxException {
- // Create a table to write to
- final String inputTable = "bigtop_hcat_input_table_" + rand.nextInt(Integer.MAX_VALUE);
- SerDeInfo serde = new SerDeInfo("default_serde",
- conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
- FieldSchema schema = new FieldSchema("line", "string", "");
- inputSchema = new HCatSchema(Collections.singletonList(new HCatFieldSchema(schema.getName(),
- HCatFieldSchema.Type.STRING, schema.getComment())));
- StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(schema), null,
- "org.apache.hadoop.mapred.TextInputFormat",
- "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null,
- new HashMap<String, String>());
- Table table = new Table(inputTable, "default", "me", 0, 0, 0, sd, null,
- new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
- client.createTable(table);
-
- final String outputTable = "bigtop_hcat_output_table_" + rand.nextInt(Integer.MAX_VALUE);
- sd = new StorageDescriptor(Arrays.asList(
- new FieldSchema("word", "string", ""),
- new FieldSchema("count", "int", "")),
- null, "org.apache.hadoop.mapred.TextInputFormat",
- "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null,
- new HashMap<String, String>());
- table = new Table(outputTable, "default", "me", 0, 0, 0, sd, null,
- new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
- client.createTable(table);
- outputSchema = new HCatSchema(Arrays.asList(
- new HCatFieldSchema("word", HCatFieldSchema.Type.STRING, ""),
- new HCatFieldSchema("count", HCatFieldSchema.Type.INT, "")));
-
- // LATER Could I use HCatWriter here and the reader to read it?
- // Write some stuff into a file in the location of the table
- table = client.getTable("default", inputTable);
- String inputFile = table.getSd().getLocation() + "/input";
- Path inputPath = new Path(inputFile);
- FileSystem fs = FileSystem.get(conf);
- FSDataOutputStream out = fs.create(inputPath);
- out.writeChars("Mary had a little lamb\n");
- out.writeChars("its fleece was white as snow\n");
- out.writeChars("and everywhere that Mary went\n");
- out.writeChars("the lamb was sure to go\n");
- out.close();
-
- Map<String, String> env = new HashMap<>();
- env.put("HADOOP_CLASSPATH", System.getProperty(HCATCORE, ""));
- Map<String, String> results = HiveHelper.execCommand(new CommandLine("hive")
- .addArgument("--service")
- .addArgument("jar")
- .addArgument(System.getProperty(JOBJAR))
- .addArgument(HCatalogMR.class.getName())
- .addArgument("-it")
- .addArgument(inputTable)
- .addArgument("-ot")
- .addArgument(outputTable)
- .addArgument("-is")
- .addArgument(inputSchema.getSchemaAsTypeString())
- .addArgument("-os")
- .addArgument(outputSchema.getSchemaAsTypeString()), env);
- LOG.info(results.toString());
- Assert.assertEquals("HCat job failed", 0, Integer.parseInt(results.get("exitValue")));
-
- client.dropTable("default", inputTable);
- client.dropTable("default", outputTable);
- }
}
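The HCatalogMR driver launched above through 'hive --service jar' is not part of this hunk. Given the -it/-ot/-is/-os arguments the test passes, it presumably wires up HCatInputFormat and HCatOutputFormat in the usual way; below is a minimal sketch under that assumption (argument parsing, schema handling, and the word-count mapper/reducer are elided, and all names are illustrative):

    import org.apache.hadoop.conf.Configured;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.util.Tool;
    import org.apache.hadoop.util.ToolRunner;
    import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
    import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
    import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;

    public class HCatalogMRSketch extends Configured implements Tool {
        @Override
        public int run(String[] args) throws Exception {
            // Assumes the fixed argument order used by the test:
            // -it <inputTable> -ot <outputTable> -is <inSchema> -os <outSchema>
            String inputTable = args[1];
            String outputTable = args[3];
            Job job = Job.getInstance(getConf(), "hcat-smoke");
            job.setJarByClass(HCatalogMRSketch.class);
            // Read HCatRecords from the input table in the default database.
            HCatInputFormat.setInput(job, "default", inputTable);
            job.setInputFormatClass(HCatInputFormat.class);
            // Write to the output table; null means no static partition values.
            HCatOutputFormat.setOutput(job,
                OutputJobInfo.create("default", outputTable, null));
            job.setOutputFormatClass(HCatOutputFormat.class);
            // A real driver would also call HCatOutputFormat.setSchema(job, ...)
            // with the -os schema and configure the mapper/reducer classes here.
            return job.waitForCompletion(true) ? 0 : 1;
        }

        public static void main(String[] args) throws Exception {
            System.exit(ToolRunner.run(new HCatalogMRSketch(), args));
        }
    }

Note that the test exports the hcatalog core jar via HADOOP_CLASSPATH before submitting, so the launched job can resolve the HCat classes at runtime.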
http://git-wip-us.apache.org/repos/asf/bigtop/blob/a05d3813/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
index a5a896a..6356640 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
@@ -32,514 +32,516 @@ import java.sql.Statement;
import java.sql.Types;
public class TestJdbc extends JdbcConnector {
- private static final Log LOG = LogFactory.getLog(TestJdbc.class.getName());
-
- /**
- * Test simple non-statement related class. setSchema is tested elsewhere because there's work
- * to do for that one. Similarly with getMetadata.
- * @throws SQLException
- */
- @Test
- public void nonStatementCalls() throws SQLException {
- conn.clearWarnings();
-
- boolean isAutoCommit = conn.getAutoCommit();
- LOG.debug("Auto commit is " + isAutoCommit);
-
- String catalog = conn.getCatalog();
- LOG.debug("Catalog is " + catalog);
-
- String schema = conn.getSchema();
- LOG.debug("Schema is " + schema);
-
- int txnIsolation = conn.getTransactionIsolation();
- LOG.debug("Transaction Isolation is " + txnIsolation);
-
- SQLWarning warning = conn.getWarnings();
- while (warning != null) {
- LOG.debug("Found a warning: " + warning.getMessage());
- warning = warning.getNextWarning();
+ private static final Log LOG = LogFactory.getLog(TestJdbc.class.getName());
+
+ /**
+ * Test simple non-statement related calls. setSchema is tested elsewhere because there's work
+ * to do for that one. Similarly with getMetadata.
+ *
+ * @throws SQLException
+ */
+ @Test
+ public void nonStatementCalls() throws SQLException {
+ conn.clearWarnings();
+
+ boolean isAutoCommit = conn.getAutoCommit();
+ LOG.debug("Auto commit is " + isAutoCommit);
+
+ String catalog = conn.getCatalog();
+ LOG.debug("Catalog is " + catalog);
+
+ String schema = conn.getSchema();
+ LOG.debug("Schema is " + schema);
+
+ int txnIsolation = conn.getTransactionIsolation();
+ LOG.debug("Transaction Isolation is " + txnIsolation);
+
+ SQLWarning warning = conn.getWarnings();
+ while (warning != null) {
+ LOG.debug("Found a warning: " + warning.getMessage());
+ warning = warning.getNextWarning();
+ }
+
+ boolean closed = conn.isClosed();
+ LOG.debug("Is closed? " + closed);
+
+ boolean readOnly = conn.isReadOnly();
+ LOG.debug("Is read only?" + readOnly);
+
+ // Hive doesn't support catalogs, so setting this to whatever should be fine. If
+ // non-Hive systems run these tests, passing a non-valid catalog name here may cause
+ // issues, so we may need to make this value configurable or something.
+ conn.setCatalog("fred");
}
- boolean closed = conn.isClosed();
- LOG.debug("Is closed? " + closed);
+ /**
+ * Test simple DatabaseMetaData calls. getColumns is tested elsewhere, as we need to call
+ * that on a valid table. Same with getFunctions.
+ *
+ * @throws SQLException
+ */
+ @Test
+ public void databaseMetaDataCalls() throws SQLException {
+ DatabaseMetaData md = conn.getMetaData();
- boolean readOnly = conn.isReadOnly();
- LOG.debug("Is read only?" + readOnly);
+ boolean boolrc = md.allTablesAreSelectable();
+ LOG.debug("All tables are selectable? " + boolrc);
- // Hive doesn't support catalogs, so setting this to whatever should be fine. If we have
- // non-Hive systems trying to pass this setting it to a non-valid catalog name may cause
- // issues, so we may need to make this value configurable or something.
- conn.setCatalog("fred");
- }
-
- /**
- * Test simple DatabaseMetaData calls. getColumns is tested elsewhere, as we need to call
- * that on a valid table. Same with getFunctions.
- * @throws SQLException
- */
- @Test
- public void databaseMetaDataCalls() throws SQLException {
- DatabaseMetaData md = conn.getMetaData();
-
- boolean boolrc = md.allTablesAreSelectable();
- LOG.debug("All tables are selectable? " + boolrc);
-
- String strrc = md.getCatalogSeparator();
- LOG.debug("Catalog separator " + strrc);
-
- strrc = md.getCatalogTerm();
- LOG.debug("Catalog term " + strrc);
-
- ResultSet rs = md.getCatalogs();
- while (rs.next()) {
- strrc = rs.getString(1);
- LOG.debug("Found catalog " + strrc);
- }
+ String strrc = md.getCatalogSeparator();
+ LOG.debug("Catalog separator " + strrc);
- Connection c = md.getConnection();
+ strrc = md.getCatalogTerm();
+ LOG.debug("Catalog term " + strrc);
- int intrc = md.getDatabaseMajorVersion();
- LOG.debug("DB major version is " + intrc);
+ ResultSet rs = md.getCatalogs();
+ while (rs.next()) {
+ strrc = rs.getString(1);
+ LOG.debug("Found catalog " + strrc);
+ }
- intrc = md.getDatabaseMinorVersion();
- LOG.debug("DB minor version is " + intrc);
+ Connection c = md.getConnection();
- strrc = md.getDatabaseProductName();
- LOG.debug("DB product name is " + strrc);
+ int intrc = md.getDatabaseMajorVersion();
+ LOG.debug("DB major version is " + intrc);
- strrc = md.getDatabaseProductVersion();
- LOG.debug("DB product version is " + strrc);
+ intrc = md.getDatabaseMinorVersion();
+ LOG.debug("DB minor version is " + intrc);
- intrc = md.getDefaultTransactionIsolation();
- LOG.debug("Default transaction isolation is " + intrc);
+ strrc = md.getDatabaseProductName();
+ LOG.debug("DB product name is " + strrc);
- intrc = md.getDriverMajorVersion();
- LOG.debug("Driver major version is " + intrc);
+ strrc = md.getDatabaseProductVersion();
+ LOG.debug("DB product version is " + strrc);
- intrc = md.getDriverMinorVersion();
- LOG.debug("Driver minor version is " + intrc);
+ intrc = md.getDefaultTransactionIsolation();
+ LOG.debug("Default transaction isolation is " + intrc);
- strrc = md.getDriverName();
- LOG.debug("Driver name is " + strrc);
+ intrc = md.getDriverMajorVersion();
+ LOG.debug("Driver major version is " + intrc);
- strrc = md.getDriverVersion();
- LOG.debug("Driver version is " + strrc);
+ intrc = md.getDriverMinorVersion();
+ LOG.debug("Driver minor version is " + intrc);
- strrc = md.getExtraNameCharacters();
- LOG.debug("Extra name characters is " + strrc);
+ strrc = md.getDriverName();
+ LOG.debug("Driver name is " + strrc);
- strrc = md.getIdentifierQuoteString();
- LOG.debug("Identifier quote string is " + strrc);
+ strrc = md.getDriverVersion();
+ LOG.debug("Driver version is " + strrc);
- // In Hive 1.2 this always returns an empty RS
- rs = md.getImportedKeys("a", "b", "d");
+ strrc = md.getExtraNameCharacters();
+ LOG.debug("Extra name characters is " + strrc);
- // In Hive 1.2 this always returns an empty RS
- rs = md.getIndexInfo("a", "b", "d", true, true);
+ strrc = md.getIdentifierQuoteString();
+ LOG.debug("Identifier quote string is " + strrc);
- intrc = md.getJDBCMajorVersion();
- LOG.debug("JDBC major version is " + intrc);
+ // In Hive 1.2 this always returns an empty RS
+ rs = md.getImportedKeys("a", "b", "d");
- intrc = md.getJDBCMinorVersion();
- LOG.debug("JDBC minor version is " + intrc);
+ // In Hive 1.2 this always returns an empty RS
+ rs = md.getIndexInfo("a", "b", "d", true, true);
- intrc = md.getMaxColumnNameLength();
- LOG.debug("Maximum column name length is " + intrc);
+ intrc = md.getJDBCMajorVersion();
+ LOG.debug("JDBC major version is " + intrc);
- strrc = md.getNumericFunctions();
- LOG.debug("Numeric functions are " + strrc);
+ intrc = md.getJDBCMinorVersion();
+ LOG.debug("JDBC minor version is " + intrc);
- // In Hive 1.2 this always returns an empty RS
- rs = md.getPrimaryKeys("a", "b", "d");
+ intrc = md.getMaxColumnNameLength();
+ LOG.debug("Maximum column name length is " + intrc);
- // In Hive 1.2 this always returns an empty RS
- rs = md.getProcedureColumns("a", "b", "d", "e");
+ strrc = md.getNumericFunctions();
+ LOG.debug("Numeric functions are " + strrc);
- strrc = md.getProcedureTerm();
- LOG.debug("Procedures are called " + strrc);
+ // In Hive 1.2 this always returns an empty RS
+ rs = md.getPrimaryKeys("a", "b", "d");
- // In Hive 1.2 this always returns an empty RS
- rs = md.getProcedures("a", "b", "d");
-
- strrc = md.getSchemaTerm();
- LOG.debug("Schemas are called " + strrc);
-
- rs = md.getSchemas();
- while (rs.next()) {
- strrc = rs.getString(1);
- LOG.debug("Found schema " + strrc);
- }
-
- strrc = md.getSearchStringEscape();
- LOG.debug("Search string escape is " + strrc);
-
- strrc = md.getStringFunctions();
- LOG.debug("String functions are " + strrc);
-
- strrc = md.getSystemFunctions();
- LOG.debug("System functions are " + strrc);
-
- rs = md.getTableTypes();
- while (rs.next()) {
- strrc = rs.getString(1);
- LOG.debug("Found table type " + strrc);
- }
-
- strrc = md.getTimeDateFunctions();
- LOG.debug("Time/date functions are " + strrc);
-
- rs = md.getTypeInfo();
- while (rs.next()) {
- strrc = rs.getString(1);
- LOG.debug("Found type " + strrc);
- }
+ // In Hive 1.2 this always returns an empty RS
+ rs = md.getProcedureColumns("a", "b", "d", "e");
- // In Hive 1.2 this always returns an empty RS
- rs = md.getUDTs("a", "b", "d", null);
+ strrc = md.getProcedureTerm();
+ LOG.debug("Procedures are called " + strrc);
- boolrc = md.supportsAlterTableWithAddColumn();
- LOG.debug("Supports alter table with add column? " + boolrc);
+ // In Hive 1.2 this always returns an empty RS
+ rs = md.getProcedures("a", "b", "d");
- boolrc = md.supportsAlterTableWithDropColumn();
- LOG.debug("Supports alter table with drop column? " + boolrc);
+ strrc = md.getSchemaTerm();
+ LOG.debug("Schemas are called " + strrc);
- boolrc = md.supportsBatchUpdates();
- LOG.debug("Supports batch updates? " + boolrc);
+ rs = md.getSchemas();
+ while (rs.next()) {
+ strrc = rs.getString(1);
+ LOG.debug("Found schema " + strrc);
+ }
- boolrc = md.supportsCatalogsInDataManipulation();
- LOG.debug("Supports catalogs in data manipulation? " + boolrc);
+ strrc = md.getSearchStringEscape();
+ LOG.debug("Search string escape is " + strrc);
- boolrc = md.supportsCatalogsInIndexDefinitions();
- LOG.debug("Supports catalogs in index definition? " + boolrc);
+ strrc = md.getStringFunctions();
+ LOG.debug("String functions are " + strrc);
- boolrc = md.supportsCatalogsInPrivilegeDefinitions();
- LOG.debug("Supports catalogs in privilege definition? " + boolrc);
+ strrc = md.getSystemFunctions();
+ LOG.debug("System functions are " + strrc);
- boolrc = md.supportsCatalogsInProcedureCalls();
- LOG.debug("Supports catalogs in procedure calls? " + boolrc);
+ rs = md.getTableTypes();
+ while (rs.next()) {
+ strrc = rs.getString(1);
+ LOG.debug("Found table type " + strrc);
+ }
- boolrc = md.supportsCatalogsInTableDefinitions();
- LOG.debug("Supports catalogs in table definition? " + boolrc);
+ strrc = md.getTimeDateFunctions();
+ LOG.debug("Time/date functions are " + strrc);
- boolrc = md.supportsColumnAliasing();
- LOG.debug("Supports column aliasing? " + boolrc);
+ rs = md.getTypeInfo();
+ while (rs.next()) {
+ strrc = rs.getString(1);
+ LOG.debug("Found type " + strrc);
+ }
- boolrc = md.supportsFullOuterJoins();
- LOG.debug("Supports full outer joins? " + boolrc);
+ // In Hive 1.2 this always returns an empty RS
+ rs = md.getUDTs("a", "b", "d", null);
- boolrc = md.supportsGroupBy();
- LOG.debug("Supports group by? " + boolrc);
+ boolrc = md.supportsAlterTableWithAddColumn();
+ LOG.debug("Supports alter table with add column? " + boolrc);
- boolrc = md.supportsLimitedOuterJoins();
- LOG.debug("Supports limited outer joins? " + boolrc);
+ boolrc = md.supportsAlterTableWithDropColumn();
+ LOG.debug("Supports alter table with drop column? " + boolrc);
- boolrc = md.supportsMultipleResultSets();
- LOG.debug("Supports limited outer joins? " + boolrc);
+ boolrc = md.supportsBatchUpdates();
+ LOG.debug("Supports batch updates? " + boolrc);
- boolrc = md.supportsNonNullableColumns();
- LOG.debug("Supports non-nullable columns? " + boolrc);
+ boolrc = md.supportsCatalogsInDataManipulation();
+ LOG.debug("Supports catalogs in data manipulation? " + boolrc);
- boolrc = md.supportsOuterJoins();
- LOG.debug("Supports outer joins? " + boolrc);
+ boolrc = md.supportsCatalogsInIndexDefinitions();
+ LOG.debug("Supports catalogs in index definition? " + boolrc);
- boolrc = md.supportsPositionedDelete();
- LOG.debug("Supports positioned delete? " + boolrc);
+ boolrc = md.supportsCatalogsInPrivilegeDefinitions();
+ LOG.debug("Supports catalogs in privilege definition? " + boolrc);
- boolrc = md.supportsPositionedUpdate();
- LOG.debug("Supports positioned update? " + boolrc);
+ boolrc = md.supportsCatalogsInProcedureCalls();
+ LOG.debug("Supports catalogs in procedure calls? " + boolrc);
- boolrc = md.supportsResultSetHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
- LOG.debug("Supports result set holdability? " + boolrc);
+ boolrc = md.supportsCatalogsInTableDefinitions();
+ LOG.debug("Supports catalogs in table definition? " + boolrc);
- boolrc = md.supportsResultSetType(ResultSet.HOLD_CURSORS_OVER_COMMIT);
- LOG.debug("Supports result set type? " + boolrc);
+ boolrc = md.supportsColumnAliasing();
+ LOG.debug("Supports column aliasing? " + boolrc);
- boolrc = md.supportsSavepoints();
- LOG.debug("Supports savepoints? " + boolrc);
+ boolrc = md.supportsFullOuterJoins();
+ LOG.debug("Supports full outer joins? " + boolrc);
- boolrc = md.supportsSchemasInDataManipulation();
- LOG.debug("Supports schemas in data manipulation? " + boolrc);
+ boolrc = md.supportsGroupBy();
+ LOG.debug("Supports group by? " + boolrc);
- boolrc = md.supportsSchemasInIndexDefinitions();
- LOG.debug("Supports schemas in index definitions? " + boolrc);
+ boolrc = md.supportsLimitedOuterJoins();
+ LOG.debug("Supports limited outer joins? " + boolrc);
- boolrc = md.supportsSchemasInPrivilegeDefinitions();
- LOG.debug("Supports schemas in privilege definitions? " + boolrc);
+ boolrc = md.supportsMultipleResultSets();
+ LOG.debug("Supports limited outer joins? " + boolrc);
- boolrc = md.supportsSchemasInProcedureCalls();
- LOG.debug("Supports schemas in procedure calls? " + boolrc);
+ boolrc = md.supportsNonNullableColumns();
+ LOG.debug("Supports non-nullable columns? " + boolrc);
- boolrc = md.supportsSchemasInTableDefinitions();
- LOG.debug("Supports schemas in table definitions? " + boolrc);
+ boolrc = md.supportsOuterJoins();
+ LOG.debug("Supports outer joins? " + boolrc);
- boolrc = md.supportsSelectForUpdate();
- LOG.debug("Supports select for update? " + boolrc);
+ boolrc = md.supportsPositionedDelete();
+ LOG.debug("Supports positioned delete? " + boolrc);
- boolrc = md.supportsStoredProcedures();
- LOG.debug("Supports stored procedures? " + boolrc);
+ boolrc = md.supportsPositionedUpdate();
+ LOG.debug("Supports positioned update? " + boolrc);
- boolrc = md.supportsTransactions();
- LOG.debug("Supports transactions? " + boolrc);
+ boolrc = md.supportsResultSetHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
+ LOG.debug("Supports result set holdability? " + boolrc);
- boolrc = md.supportsUnion();
- LOG.debug("Supports union? " + boolrc);
+ boolrc = md.supportsResultSetType(ResultSet.TYPE_FORWARD_ONLY);
+ LOG.debug("Supports result set type? " + boolrc);
- boolrc = md.supportsUnionAll();
- LOG.debug("Supports union all? " + boolrc);
+ boolrc = md.supportsSavepoints();
+ LOG.debug("Supports savepoints? " + boolrc);
- }
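+ // Schema-scoped capabilities; Hive exposes its databases as JDBC schemas, so these are the interesting ones.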
+ boolrc = md.supportsSchemasInDataManipulation();
+ LOG.debug("Supports schemas in data manipulation? " + boolrc);
- @Test
- public void setSchema() throws SQLException {
- try (Statement stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
- ResultSet.CONCUR_READ_ONLY)) {
+ boolrc = md.supportsSchemasInIndexDefinitions();
+ LOG.debug("Supports schemas in index definitions? " + boolrc);
- final String dbName = "bigtop_jdbc_test_db";
+ boolrc = md.supportsSchemasInPrivilegeDefinitions();
+ LOG.debug("Supports schemas in privilege definitions? " + boolrc);
- final String tableName = "bigtop_jdbc_test_table";
- stmt.execute("drop table if exists " + tableName);
+ boolrc = md.supportsSchemasInProcedureCalls();
+ LOG.debug("Supports schemas in procedure calls? " + boolrc);
- stmt.execute("drop database if exists " + dbName + " cascade");
- stmt.execute("create database " + dbName);
+ boolrc = md.supportsSchemasInTableDefinitions();
+ LOG.debug("Supports schemas in table definitions? " + boolrc);
- conn.setSchema(dbName);
+ boolrc = md.supportsSelectForUpdate();
+ LOG.debug("Supports select for update? " + boolrc);
- DatabaseMetaData md = conn.getMetaData();
+ boolrc = md.supportsStoredProcedures();
+ LOG.debug("Supports stored procedures? " + boolrc);
- ResultSet rs = md.getSchemas(null, dbName);
+ boolrc = md.supportsTransactions();
+ LOG.debug("Supports transactions? " + boolrc);
- while (rs.next()) {
- String schemaName = rs.getString(2);
- LOG.debug("Schema name is " + schemaName);
- }
+ boolrc = md.supportsUnion();
+ LOG.debug("Supports union? " + boolrc);
- stmt.execute("create table " + tableName + " (i int, s varchar(32))");
+ boolrc = md.supportsUnionAll();
+ LOG.debug("Supports union all? " + boolrc);
- rs = md.getTables(null, dbName, tableName, null);
- while (rs.next()) {
- String tName = rs.getString(3);
- LOG.debug("Schema name is " + tName);
- }
-
- rs = md.getColumns(null, dbName, tableName, "i");
- while (rs.next()) {
- String colName = rs.getString(4);
- LOG.debug("Schema name is " + colName);
- }
-
- rs = md.getFunctions(null, dbName, "foo");
- while (rs.next()) {
- String funcName = rs.getString(3);
- LOG.debug("Schema name is " + funcName);
- }
- }
- }
-
- @Test
- public void statement() throws SQLException {
- try (Statement stmt = conn.createStatement()) {
- stmt.cancel();
}
- try (Statement stmt = conn.createStatement()) {
- stmt.clearWarnings();
-
- final String tableName = "bigtop_jdbc_statement_test_table";
-
- stmt.execute("drop table if exists " + tableName);
- stmt.execute("create table " + tableName + " (a int, b varchar(32))");
-
- stmt.executeUpdate("insert into " + tableName + " values (1, 'abc'), (2, 'def')");
-
- int intrc = stmt.getUpdateCount();
- LOG.debug("Update count is " + intrc);
-
- ResultSet rs = stmt.executeQuery("select * from " + tableName);
- while (rs.next()) {
- LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2));
- }
-
- Connection localConn = stmt.getConnection();
-
- intrc = stmt.getFetchDirection();
- LOG.debug("Fetch direction is " + intrc);
+ @Test
+ public void setSchema() throws SQLException {
+ try (Statement stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
+ ResultSet.CONCUR_READ_ONLY)) {
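+ // Create a scroll-insensitive, read-only statement to exercise that createStatement overload.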
- intrc = stmt.getFetchSize();
- LOG.debug("Fetch size is " + intrc);
+ final String dbName = "bigtop_jdbc_test_db";
- intrc = stmt.getMaxRows();
- LOG.debug("max rows is " + intrc);
+ final String tableName = "bigtop_jdbc_test_table";
+ stmt.execute("drop table if exists " + tableName);
- boolean boolrc = stmt.getMoreResults();
- LOG.debug("more results is " + boolrc);
+ stmt.execute("drop database if exists " + dbName + " cascade");
+ stmt.execute("create database " + dbName);
- intrc = stmt.getQueryTimeout();
- LOG.debug("query timeout is " + intrc);
+ conn.setSchema(dbName);
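+ // setSchema makes dbName the connection's current schema; the metadata lookups below also filter on it explicitly.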
- stmt.execute("select * from " + tableName);
- rs = stmt.getResultSet();
- while (rs.next()) {
- LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2));
- }
+ DatabaseMetaData md = conn.getMetaData();
- intrc = stmt.getResultSetType();
- LOG.debug("result set type is " + intrc);
+ ResultSet rs = md.getSchemas(null, dbName);
- SQLWarning warning = stmt.getWarnings();
- while (warning != null) {
- LOG.debug("Found a warning: " + warning.getMessage());
- warning = warning.getNextWarning();
- }
+ while (rs.next()) {
+ String schemaName = rs.getString(2);
+ LOG.debug("Schema name is " + schemaName);
+ }
- boolrc = stmt.isClosed();
- LOG.debug("is closed " + boolrc);
+ stmt.execute("create table " + tableName + " (i int, s varchar(32))");
- boolrc = stmt.isCloseOnCompletion();
- LOG.debug("is close on completion " + boolrc);
+ rs = md.getTables(null, dbName, tableName, null);
+ while (rs.next()) {
+ String tName = rs.getString(3);
+ LOG.debug("Schema name is " + tName);
+ }
- boolrc = stmt.isPoolable();
- LOG.debug("is poolable " + boolrc);
-
- stmt.setFetchDirection(ResultSet.FETCH_FORWARD);
- stmt.setFetchSize(500);
- stmt.setMaxRows(500);
- }
- }
-
- @Test
- public void preparedStmtAndResultSet() throws SQLException {
- final String tableName = "bigtop_jdbc_psars_test_table";
- try (Statement stmt = conn.createStatement()) {
- stmt.execute("drop table if exists " + tableName);
- stmt.execute("create table " + tableName + " (bo boolean, ti tinyint, db double, fl float, " +
- "i int, lo bigint, sh smallint, st varchar(32))");
- }
-
- // NOTE Hive 1.2 theoretically support binary, Date & Timestamp in JDBC, but I get errors when I
- // try to put them in the query.
- try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName +
- " values (?, ?, ?, ?, ?, ?, ?, ?)")) {
- ps.setBoolean(1, true);
- ps.setByte(2, (byte)1);
- ps.setDouble(3, 3.141592654);
- ps.setFloat(4, 3.14f);
- ps.setInt(5, 3);
- ps.setLong(6, 10L);
- ps.setShort(7, (short)20);
- ps.setString(8, "abc");
- ps.executeUpdate();
- }
-
- try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName + " (i, st) " +
- "values(?, ?)", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) {
- ps.setNull(1, Types.INTEGER);
- ps.setObject(2, "mary had a little lamb");
- ps.executeUpdate();
- ps.setNull(1, Types.INTEGER, null);
- ps.setString(2, "its fleece was white as snow");
- ps.clearParameters();
- ps.setNull(1, Types.INTEGER, null);
- ps.setString(2, "its fleece was white as snow");
- ps.execute();
+ rs = md.getColumns(null, dbName, tableName, "i");
+ while (rs.next()) {
+ String colName = rs.getString(4);
+ LOG.debug("Schema name is " + colName);
+ }
+ rs = md.getFunctions(null, dbName, "foo");
+ while (rs.next()) {
+ String funcName = rs.getString(3);
+ LOG.debug("Schema name is " + funcName);
+ }
+ }
}
- try (Statement stmt = conn.createStatement()) {
+ @Test
+ public void statement() throws SQLException {
+ try (Statement stmt = conn.createStatement()) {
+ stmt.cancel();
+ }
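+ // A fresh statement exercises the remaining Statement getters, setters, and warning handling.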
- ResultSet rs = stmt.executeQuery("select * from " + tableName);
+ try (Statement stmt = conn.createStatement()) {
+ stmt.clearWarnings();
- ResultSetMetaData md = rs.getMetaData();
+ final String tableName = "bigtop_jdbc_statement_test_table";
- int colCnt = md.getColumnCount();
- LOG.debug("Column count is " + colCnt);
+ stmt.execute("drop table if exists " + tableName);
+ stmt.execute("create table " + tableName + " (a int, b varchar(32))");
- for (int i = 1; i <= colCnt; i++) {
- LOG.debug("Looking at column " + i);
- String strrc = md.getColumnClassName(i);
- LOG.debug("Column class name is " + strrc);
+ stmt.executeUpdate("insert into " + tableName + " values (1, 'abc'), (2, 'def')");
- int intrc = md.getColumnDisplaySize(i);
- LOG.debug("Column display size is " + intrc);
+ int intrc = stmt.getUpdateCount();
+ LOG.debug("Update count is " + intrc);
- strrc = md.getColumnLabel(i);
- LOG.debug("Column label is " + strrc);
+ ResultSet rs = stmt.executeQuery("select * from " + tableName);
+ while (rs.next()) {
+ LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2));
+ }
- strrc = md.getColumnName(i);
- LOG.debug("Column name is " + strrc);
+ Connection localConn = stmt.getConnection();
- intrc = md.getColumnType(i);
- LOG.debug("Column type is " + intrc);
+ intrc = stmt.getFetchDirection();
+ LOG.debug("Fetch direction is " + intrc);
- strrc = md.getColumnTypeName(i);
- LOG.debug("Column type name is " + strrc);
+ intrc = stmt.getFetchSize();
+ LOG.debug("Fetch size is " + intrc);
- intrc = md.getPrecision(i);
- LOG.debug("Precision is " + intrc);
+ intrc = stmt.getMaxRows();
+ LOG.debug("max rows is " + intrc);
- intrc = md.getScale(i);
- LOG.debug("Scale is " + intrc);
+ boolean boolrc = stmt.getMoreResults();
+ LOG.debug("more results is " + boolrc);
- boolean boolrc = md.isAutoIncrement(i);
- LOG.debug("Is auto increment? " + boolrc);
+ intrc = stmt.getQueryTimeout();
+ LOG.debug("query timeout is " + intrc);
- boolrc = md.isCaseSensitive(i);
- LOG.debug("Is case sensitive? " + boolrc);
+ stmt.execute("select * from " + tableName);
+ rs = stmt.getResultSet();
+ while (rs.next()) {
+ LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2));
+ }
- boolrc = md.isCurrency(i);
- LOG.debug("Is currency? " + boolrc);
+ intrc = stmt.getResultSetType();
+ LOG.debug("result set type is " + intrc);
- intrc = md.getScale(i);
- LOG.debug("Scale is " + intrc);
+ SQLWarning warning = stmt.getWarnings();
+ while (warning != null) {
+ LOG.debug("Found a warning: " + warning.getMessage());
+ warning = warning.getNextWarning();
+ }
- intrc = md.isNullable(i);
- LOG.debug("Is nullable? " + intrc);
+ boolrc = stmt.isClosed();
+ LOG.debug("is closed " + boolrc);
- boolrc = md.isReadOnly(i);
- LOG.debug("Is read only? " + boolrc);
+ boolrc = stmt.isCloseOnCompletion();
+ LOG.debug("is close on completion " + boolrc);
- }
+ boolrc = stmt.isPoolable();
+ LOG.debug("is poolable " + boolrc);
- while (rs.next()) {
- LOG.debug("bo = " + rs.getBoolean(1));
- LOG.debug("bo = " + rs.getBoolean("bo"));
- LOG.debug("ti = " + rs.getByte(2));
- LOG.debug("ti = " + rs.getByte("ti"));
- LOG.debug("db = " + rs.getDouble(3));
- LOG.debug("db = " + rs.getDouble("db"));
- LOG.debug("fl = " + rs.getFloat(4));
- LOG.debug("fl = " + rs.getFloat("fl"));
- LOG.debug("i = " + rs.getInt(5));
- LOG.debug("i = " + rs.getInt("i"));
- LOG.debug("lo = " + rs.getLong(6));
- LOG.debug("lo = " + rs.getLong("lo"));
- LOG.debug("sh = " + rs.getShort(7));
- LOG.debug("sh = " + rs.getShort("sh"));
- LOG.debug("st = " + rs.getString(8));
- LOG.debug("st = " + rs.getString("st"));
- LOG.debug("tm = " + rs.getObject(8));
- LOG.debug("tm = " + rs.getObject("st"));
- LOG.debug("tm was null " + rs.wasNull());
- }
- LOG.debug("bo is column " + rs.findColumn("bo"));
-
- int intrc = rs.getConcurrency();
- LOG.debug("concurrency " + intrc);
-
- intrc = rs.getFetchDirection();
- LOG.debug("fetch direction " + intrc);
-
- intrc = rs.getType();
- LOG.debug("type " + intrc);
-
- Statement copy = rs.getStatement();
+ stmt.setFetchDirection(ResultSet.FETCH_FORWARD);
+ stmt.setFetchSize(500);
+ stmt.setMaxRows(500);
+ }
+ }
- SQLWarning warning = rs.getWarnings();
- while (warning != null) {
- LOG.debug("Found a warning: " + warning.getMessage());
- warning = warning.getNextWarning();
- }
- rs.clearWarnings();
+ @Test
+ public void preparedStmtAndResultSet() throws SQLException {
+ final String tableName = "bigtop_jdbc_psars_test_table";
+ try (Statement stmt = conn.createStatement()) {
+ stmt.execute("drop table if exists " + tableName);
+ stmt.execute("create table " + tableName + " (bo boolean, ti tinyint, db double, fl float, " +
+ "i int, lo bigint, sh smallint, st varchar(32))");
+ }
+
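+ // The table covers the primitive Hive types that the prepared statement below binds.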
+ // NOTE Hive 1.2 theoretically supports binary, Date & Timestamp in JDBC, but I get errors when I
+ // try to put them in the query.
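+ // For reference only (not executed): if a driver accepted them, the bindings
+ // would look like ps.setDate(n, java.sql.Date.valueOf("2017-03-23")) and
+ // ps.setTimestamp(n, java.sql.Timestamp.valueOf("2017-03-23 17:28:19")),
+ // with n being a hypothetical extra column index.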
+ try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName +
+ " values (?, ?, ?, ?, ?, ?, ?, ?)")) {
+ ps.setBoolean(1, true);
+ ps.setByte(2, (byte) 1);
+ ps.setDouble(3, 3.141592654);
+ ps.setFloat(4, 3.14f);
+ ps.setInt(5, 3);
+ ps.setLong(6, 10L);
+ ps.setShort(7, (short) 20);
+ ps.setString(8, "abc");
+ ps.executeUpdate();
+ }
+
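+ // The second prepared statement exercises setNull, setObject, and clearParameters, re-binding after the clear before execute().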
+ try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName + " (i, st) " +
+ "values(?, ?)", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) {
+ ps.setNull(1, Types.INTEGER);
+ ps.setObject(2, "mary had a little lamb");
+ ps.executeUpdate();
+ ps.setNull(1, Types.INTEGER, null);
+ ps.setString(2, "its fleece was white as snow");
+ ps.clearParameters();
+ ps.setNull(1, Types.INTEGER, null);
+ ps.setString(2, "its fleece was white as snow");
+ ps.execute();
+
+ }
+
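+ // Read the rows back and walk the ResultSetMetaData for every column, logging each descriptor.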
+ try (Statement stmt = conn.createStatement()) {
+
+ ResultSet rs = stmt.executeQuery("select * from " + tableName);
+
+ ResultSetMetaData md = rs.getMetaData();
+
+ int colCnt = md.getColumnCount();
+ LOG.debug("Column count is " + colCnt);
+
+ for (int i = 1; i <= colCnt; i++) {
+ LOG.debug("Looking at column " + i);
+ String strrc = md.getColumnClassName(i);
+ LOG.debug("Column class name is " + strrc);
+
+ int intrc = md.getColumnDisplaySize(i);
+ LOG.debug("Column display size is " + intrc);
+
+ strrc = md.getColumnLabel(i);
+ LOG.debug("Column label is " + strrc);
+
+ strrc = md.getColumnName(i);
+ LOG.debug("Column name is " + strrc);
+
+ intrc = md.getColumnType(i);
+ LOG.debug("Column type is " + intrc);
+
+ strrc = md.getColumnTypeName(i);
+ LOG.debug("Column type name is " + strrc);
+
+ intrc = md.getPrecision(i);
+ LOG.debug("Precision is " + intrc);
+
+ intrc = md.getScale(i);
+ LOG.debug("Scale is " + intrc);
+
+ boolean boolrc = md.isAutoIncrement(i);
+ LOG.debug("Is auto increment? " + boolrc);
+
+ boolrc = md.isCaseSensitive(i);
+ LOG.debug("Is case sensitive? " + boolrc);
+
+ boolrc = md.isCurrency(i);
+ LOG.debug("Is currency? " + boolrc);
+
+ intrc = md.isNullable(i);
+ LOG.debug("Is nullable? " + intrc);
+
+ boolrc = md.isReadOnly(i);
+ LOG.debug("Is read only? " + boolrc);
+
+ }
+
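+ // Fetch each column both by index and by label.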
+ while (rs.next()) {
+ LOG.debug("bo = " + rs.getBoolean(1));
+ LOG.debug("bo = " + rs.getBoolean("bo"));
+ LOG.debug("ti = " + rs.getByte(2));
+ LOG.debug("ti = " + rs.getByte("ti"));
+ LOG.debug("db = " + rs.getDouble(3));
+ LOG.debug("db = " + rs.getDouble("db"));
+ LOG.debug("fl = " + rs.getFloat(4));
+ LOG.debug("fl = " + rs.getFloat("fl"));
+ LOG.debug("i = " + rs.getInt(5));
+ LOG.debug("i = " + rs.getInt("i"));
+ LOG.debug("lo = " + rs.getLong(6));
+ LOG.debug("lo = " + rs.getLong("lo"));
+ LOG.debug("sh = " + rs.getShort(7));
+ LOG.debug("sh = " + rs.getShort("sh"));
+ LOG.debug("st = " + rs.getString(8));
+ LOG.debug("st = " + rs.getString("st"));
+ LOG.debug("tm = " + rs.getObject(8));
+ LOG.debug("tm = " + rs.getObject("st"));
+ LOG.debug("tm was null " + rs.wasNull());
+ }
+ LOG.debug("bo is column " + rs.findColumn("bo"));
+
+ int intrc = rs.getConcurrency();
+ LOG.debug("concurrency " + intrc);
+
+ intrc = rs.getFetchDirection();
+ LOG.debug("fetch direction " + intrc);
+
+ intrc = rs.getType();
+ LOG.debug("type " + intrc);
+
+ Statement copy = rs.getStatement();
+
+ SQLWarning warning = rs.getWarnings();
+ while (warning != null) {
+ LOG.debug("Found a warning: " + warning.getMessage());
+ warning = warning.getNextWarning();
+ }
+ rs.clearWarnings();
+ }
}
- }
}