Posted to commits@bigtop.apache.org by rv...@apache.org on 2017/03/23 17:27:32 UTC

[01/50] [abbrv] bigtop git commit: ODPI-184, tests for thrift.

Repository: bigtop
Updated Branches:
  refs/heads/master 0f2436b01 -> a05d3813f


ODPI-184, tests for thrift.

(cherry picked from commit f63ccbc0c36907c5d363ae8030043acdc8017565)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/f6ac2338
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/f6ac2338
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/f6ac2338

Branch: refs/heads/master
Commit: f6ac2338701eae17cb4016023ed19630a129ef07
Parents: 3bf2419
Author: Alan Gates <ga...@hortonworks.com>
Authored: Wed Oct 19 16:30:10 2016 -0400
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:08 2017 -0700

----------------------------------------------------------------------
 bigtop-tests/spec-tests/runtime/build.gradle    |  12 +
 .../odpi/specs/runtime/hive/JdbcConnector.java  |   8 +
 .../org/odpi/specs/runtime/hive/TestThrift.java | 266 +++++++++++++++++++
 3 files changed, 286 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/f6ac2338/bigtop-tests/spec-tests/runtime/build.gradle
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/build.gradle b/bigtop-tests/spec-tests/runtime/build.gradle
index 1285a19..4df0dae 100644
--- a/bigtop-tests/spec-tests/runtime/build.gradle
+++ b/bigtop-tests/spec-tests/runtime/build.gradle
@@ -16,11 +16,23 @@
  * limitations under the License.
  */
 def junitVersion = '4.11'
+
+repositories {
+  maven {
+    url "http://conjars.org/repo/"
+  }
+}
 dependencies {
   compile group: 'junit', name: 'junit', version: junitVersion, transitive: 'true'
   compile group: 'commons-logging', name: 'commons-logging', version: '1.1.3'
   compile group: 'org.apache.hive', name: 'hive-jdbc', version: '1.2.1'
+  compile group: 'org.apache.hive', name: 'hive-metastore', version: '1.2.1'
+  compile group: 'org.apache.hive', name: 'hive-common', version: '1.2.1'
+  compile group: 'org.apache.thrift', name: 'libfb303', version: '0.9.3'
+  compile group: 'org.apache.thrift', name: 'libthrift', version: '0.9.3'
   testCompile group: 'org.apache.hadoop', name: 'hadoop-common', version: '2.7.2'
+  testCompile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-core', version: '2.7.2'
+  testCompile group: 'org.apache.hive', name: 'hive-exec', version: '1.2.1'
   if (System.env.HADOOP_CONF_DIR) testRuntime files(System.env.HADOOP_CONF_DIR)
 }
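
The unchanged HADOOP_CONF_DIR line above puts the cluster's configuration directory on
the test runtime classpath. HiveConf extends Hadoop's Configuration, so the usual
*-site.xml files in that directory are picked up automatically by the tests below; a
small sketch of that behavior (the key names are only examples, and the values will be
null if the corresponding site files are absent):

  import org.apache.hadoop.hive.conf.HiveConf;

  // Sketch: with HADOOP_CONF_DIR on the classpath, core-site.xml/hive-site.xml are
  // loaded as classpath resources when a HiveConf is constructed.
  public class ConfCheck {
    public static void main(String[] args) {
      HiveConf conf = new HiveConf();
      System.out.println("fs.defaultFS = " + conf.get("fs.defaultFS"));
      System.out.println("hive.metastore.uris = " + conf.getVar(HiveConf.ConfVars.METASTOREURIS));
    }
  }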
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/f6ac2338/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
index 4f15ab4..f5cc379 100644
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
+++ b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
@@ -34,6 +34,8 @@ public class JdbcConnector {
   protected static final String USER = "odpi.test.hive.jdbc.user";
   protected static final String PASSWD = "odpi.test.hive.jdbc.password";
   protected static final String LOCATION = "odpi.test.hive.location";
+  protected static final String METASTORE_URL = "odpi.test.hive.metastore.url";
+  protected static final String TEST_THRIFT = "odpi.test.hive.thrift.test";
 
   protected static Connection conn;
 
@@ -65,4 +67,10 @@ public class JdbcConnector {
     return val;
   }
 
+  protected static boolean testActive(String property, String description) {
+    String val = System.getProperty(property, "true");
+    LOG.debug(description + " is " + val);
+    return Boolean.valueOf(val);
+  }
+
 }
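
Together with the existing odpi.test.hive.* properties above, the two new keys let a
harness point these tests at a metastore and switch the Thrift coverage on or off;
testActive() defaults to "true", so a test is skipped only when the property is
explicitly set to "false". A sketch of the expected configuration with placeholder
values (in practice these are passed as -D options to the JVM running the tests
rather than set programmatically):

  // Illustrative only: placeholder values for the properties JdbcConnector reads.
  public class OdpiTestProperties {
    public static void main(String[] args) {
      System.setProperty("odpi.test.hive.metastore.url", "thrift://metastore-host:9083");
      System.setProperty("odpi.test.hive.location", "hdfs:///user/hive/odpi-test");
      // Defaults to "true"; set to "false" to skip the Thrift metastore tests.
      System.setProperty("odpi.test.hive.thrift.test", "true");
    }
  }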

http://git-wip-us.apache.org/repos/asf/bigtop/blob/f6ac2338/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
new file mode 100644
index 0000000..1aede96
--- /dev/null
+++ b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
@@ -0,0 +1,266 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hive;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.api.AddPartitionsRequest;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.DropPartitionsRequest;
+import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.RequestPartsSpec;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
+import org.apache.thrift.TException;
+import org.junit.Assert;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Random;
+
+public class TestThrift {
+
+  private static ThriftHiveMetastore.Iface client = null;
+  private static HiveConf conf;
+
+  private Random rand;
+
+  @BeforeClass
+  public static void connect() throws MetaException {
+    if (JdbcConnector.testActive(JdbcConnector.TEST_THRIFT, "Test Thrift ")) {
+      String url = JdbcConnector.getProperty(JdbcConnector.METASTORE_URL, "Thrift metastore URL");
+      conf = new HiveConf();
+      conf.setVar(HiveConf.ConfVars.METASTOREURIS, url);
+      client = new HiveMetaStore.HMSHandler("ODPi test", conf, true);
+    }
+  }
+
+  @Before
+  public void checkIfActive() {
+    Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_THRIFT, "Test Thrift "));
+    rand = new Random();
+  }
+
+  @Test
+  public void db() throws TException {
+    final String dbName = "odpi_thrift_db_" + rand.nextInt(Integer.MAX_VALUE);
+
+    String location = JdbcConnector.getProperty(JdbcConnector.LOCATION, " HDFS location we can " +
+        "write to");
+    Database db = new Database(dbName, "a db", location, new HashMap<String, String>());
+    client.create_database(db);
+    db = client.get_database(dbName);
+    Assert.assertNotNull(db);
+    db = new Database(db);
+    db.getParameters().put("a", "b");
+    client.alter_database(dbName, db);
+    List<String> alldbs = client.get_databases("odpi_*");
+    Assert.assertNotNull(alldbs);
+    Assert.assertTrue(alldbs.size() > 0);
+    alldbs = client.get_all_databases();
+    Assert.assertNotNull(alldbs);
+    Assert.assertTrue(alldbs.size() > 0);
+    client.drop_database(dbName, true, true);
+  }
+
+  // Not testing types calls, as they aren't used AFAIK
+
+  @Test
+  public void nonPartitionedTable() throws TException {
+    final String tableName = "odpi_thrift_table_" + rand.nextInt(Integer.MAX_VALUE);
+    String location = JdbcConnector.getProperty(JdbcConnector.LOCATION, " HDFS location we can " +
+        "write to");
+
+    // I don't test every operation related to tables, but only those that are frequently used.
+    SerDeInfo serde = new SerDeInfo("default_serde",
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
+    FieldSchema fs = new FieldSchema("a", "int", "no comment");
+    StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), location,
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT),
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), false, 0, serde, null, null,
+        new HashMap<String, String>());
+    Table table = new Table(tableName, "default", "me", 0, 0, 0, sd, null,
+        new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+    EnvironmentContext envContext = new EnvironmentContext(new HashMap<String, String>());
+    client.create_table_with_environment_context(table, envContext);
+
+    table = client.get_table("default", tableName);
+    Assert.assertNotNull(table);
+
+    List<Table> tables =
+        client.get_table_objects_by_name("default", Collections.singletonList(tableName));
+    Assert.assertNotNull(tables);
+    Assert.assertEquals(1, tables.size());
+
+    List<String> tableNames = client.get_tables("default", "odpi_*");
+    Assert.assertNotNull(tableNames);
+    Assert.assertTrue(tableNames.size() >= 1);
+
+    tableNames = client.get_all_tables("default");
+    Assert.assertNotNull(tableNames);
+    Assert.assertTrue(tableNames.size() >= 1);
+
+    List<FieldSchema> cols = client.get_fields("default", tableName);
+    Assert.assertNotNull(cols);
+    Assert.assertEquals(1, cols.size());
+
+    cols = client.get_schema_with_environment_context("default", tableName, envContext);
+    Assert.assertNotNull(cols);
+    Assert.assertEquals(1, cols.size());
+
+    table = new Table(table);
+    table.getParameters().put("a", "b");
+    client.alter_table_with_cascade("default", tableName, table, false);
+
+    table.getParameters().put("c", "d");
+    client.alter_table_with_environment_context("default", tableName, table, envContext);
+
+    client.drop_table_with_environment_context("default", tableName, true, envContext);
+  }
+
+  @Test
+  public void partitionedTable() throws TException {
+    final String tableName = "odpi_thrift_partitioned_table_" + rand.nextInt(Integer.MAX_VALUE);
+    String location = JdbcConnector.getProperty(JdbcConnector.LOCATION, " HDFS location we can " +
+        "write to");
+
+    // I don't test every operation related to tables, but only those that are frequently used.
+    SerDeInfo serde = new SerDeInfo("default_serde",
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
+    FieldSchema fs = new FieldSchema("a", "int", "no comment");
+    StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), location,
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT),
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), false, 0, serde, null, null,
+        new HashMap<String, String>());
+    FieldSchema pk = new FieldSchema("pk", "string", "");
+    Table table = new Table(tableName, "default", "me", 0, 0, 0, sd, Collections.singletonList(pk),
+        new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+    EnvironmentContext envContext = new EnvironmentContext(new HashMap<String, String>());
+    client.create_table_with_environment_context(table, envContext);
+
+    sd = new StorageDescriptor(Collections.singletonList(fs), location + "/x",
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
+        new HashMap<String, String>());
+    Partition partition = new Partition(Collections.singletonList("x"), "default", tableName, 0,
+        0, sd, new HashMap<String, String>());
+    client.add_partition_with_environment_context(partition, envContext);
+
+    sd = new StorageDescriptor(Collections.singletonList(fs), location + "/y",
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
+        new HashMap<String, String>());
+    partition = new Partition(Collections.singletonList("y"), "default", tableName, 0,
+        0, sd, new HashMap<String, String>());
+    client.add_partitions(Collections.singletonList(partition));
+
+    sd = new StorageDescriptor(Collections.singletonList(fs), location + "/z",
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
+        new HashMap<String, String>());
+    partition = new Partition(Collections.singletonList("z"), "default", tableName, 0,
+        0, sd, new HashMap<String, String>());
+    AddPartitionsRequest rqst = new AddPartitionsRequest("default", tableName,
+        Collections.singletonList(partition), true);
+    client.add_partitions_req(rqst);
+
+    List<Partition> parts = client.get_partitions("default", tableName, (short)-1);
+    Assert.assertNotNull(parts);
+    Assert.assertEquals(3, parts.size());
+
+    parts = client.get_partitions_with_auth("default", tableName, (short)-1, "me",
+        Collections.<String>emptyList());
+    Assert.assertNotNull(parts);
+    Assert.assertEquals(3, parts.size());
+
+    parts = client.get_partitions_ps("default", tableName, Collections.singletonList("x"),
+        (short)-1);
+    Assert.assertNotNull(parts);
+    Assert.assertEquals(1, parts.size());
+
+    parts = client.get_partitions_by_filter("default", tableName, "pk = \"x\"", (short)-1);
+    Assert.assertNotNull(parts);
+    Assert.assertEquals(1, parts.size());
+
+    parts = client.get_partitions_by_names("default", tableName, Collections.singletonList("pk=x"));
+    Assert.assertNotNull(parts);
+    Assert.assertEquals(1, parts.size());
+
+    partition = client.get_partition("default", tableName, Collections.singletonList("x"));
+    Assert.assertNotNull(partition);
+
+    partition = client.get_partition_by_name("default", tableName, "pk=x");
+    Assert.assertNotNull(partition);
+
+    partition = client.get_partition_with_auth("default", tableName, Collections.singletonList("x"),
+        "me", Collections.<String>emptyList());
+    Assert.assertNotNull(partition);
+
+    List<String> partitionNames = client.get_partition_names("default", tableName, (short)-1);
+    Assert.assertNotNull(partitionNames);
+    Assert.assertEquals(3, partitionNames.size());
+
+    partition = new Partition(partition);
+    partition.getParameters().put("a", "b");
+    client.alter_partition("default", tableName, partition);
+
+    for (Partition p : parts) p.getParameters().put("c", "d");
+    client.alter_partitions("default", tableName, parts);
+
+    // Not testing get_partitions_by_expr because I don't want to hard code some byte sequence
+    // from the parser.  The odds that anyone other than the Hive parser would call this method
+    // seem low, since you'd have to exactly match the serialization of the Hive parser.
+
+    // Not testing partition marking events, not used by anyone but Hive replication AFAIK
+
+    client.drop_partition_by_name_with_environment_context("default", tableName, "pk=x", true,
+        envContext);
+    client.drop_partition_with_environment_context("default", tableName,
+        Collections.singletonList("y"), true, envContext);
+    DropPartitionsRequest dropRequest = new DropPartitionsRequest("default", tableName,
+        RequestPartsSpec.names(Collections.singletonList("pk=z")));
+    client.drop_partitions_req(dropRequest);
+  }
+
+  // Not testing index calls, as no one uses indices
+
+
+  // Not sure if anyone uses stats calls or not.  Other query engines might.  Ignoring for now.
+
+  // Not sure if anyone else uses functions, though I'm guessing not, as without Hive classes they
+  // won't be runnable.
+
+  // Not testing authorization calls as AFAIK no one else uses Hive security
+
+  // Not testing transaction/locking calls, as those are used only by Hive.
+
+  // Not testing notification logging calls, as those are used only by Hive replication.
+
+}
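
Note that connect() above obtains its client by instantiating an in-process
HiveMetaStore.HMSHandler, which implements ThriftHiveMetastore.Iface directly. The
remote alternative is to open an actual Thrift connection with HiveMetaStoreClient,
which is how the earlier revision of this test (removed later in this thread) did it;
a sketch of that variant:

  import org.apache.hadoop.hive.conf.HiveConf;
  import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
  import org.apache.hadoop.hive.metastore.IMetaStoreClient;
  import org.apache.hadoop.hive.metastore.api.MetaException;

  // Remote-client variant: connect over Thrift to the metastore named by the same
  // odpi.test.hive.metastore.url property instead of embedding an HMSHandler.
  public class RemoteMetastoreConnector {
    static IMetaStoreClient connect() throws MetaException {
      String url = JdbcConnector.getProperty(JdbcConnector.METASTORE_URL, "Thrift metastore URL");
      HiveConf conf = new HiveConf();
      conf.setVar(HiveConf.ConfVars.METASTOREURIS, url);
      return new HiveMetaStoreClient(conf);
    }
  }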


[03/50] [abbrv] bigtop git commit: Added dependencies for apache commons-exec.

Posted by rv...@apache.org.
Added dependencies for apache commons-exec.

(cherry picked from commit 0d7dc0380e942b443189a19b93b96b3250332431)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/62dbaf77
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/62dbaf77
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/62dbaf77

Branch: refs/heads/master
Commit: 62dbaf77de9c197e70c3a0424bebdef8a678d2ce
Parents: 1b8079c
Author: Raj Desai <rd...@us.ibm.com>
Authored: Mon Oct 24 17:18:16 2016 -0700
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:09 2017 -0700

----------------------------------------------------------------------
 bigtop-tests/spec-tests/runtime/build.gradle | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/62dbaf77/bigtop-tests/spec-tests/runtime/build.gradle
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/build.gradle b/bigtop-tests/spec-tests/runtime/build.gradle
index 4df0dae..5505550 100644
--- a/bigtop-tests/spec-tests/runtime/build.gradle
+++ b/bigtop-tests/spec-tests/runtime/build.gradle
@@ -25,6 +25,7 @@ repositories {
 dependencies {
   compile group: 'junit', name: 'junit', version: junitVersion, transitive: 'true'
   compile group: 'commons-logging', name: 'commons-logging', version: '1.1.3'
+  compile group: 'org.apache.commons', name: 'commons-exec', version: '1.3'
   compile group: 'org.apache.hive', name: 'hive-jdbc', version: '1.2.1'
   compile group: 'org.apache.hive', name: 'hive-metastore', version: '1.2.1'
   compile group: 'org.apache.hive', name: 'hive-common', version: '1.2.1'
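
The new commons-exec dependency is what the HiveHelper added in the next commit is
built on. Its core pattern is small: build a CommandLine, hand it to an Executor,
and read back the exit value. A bare-bones sketch ("echo" is only a placeholder
command):

  import org.apache.commons.exec.CommandLine;
  import org.apache.commons.exec.DefaultExecutor;

  // Minimal commons-exec usage; HiveHelper.execCommand (next commit) adds stream
  // capture, a watchdog, and asynchronous result handling on top of this.
  public class CommonsExecSketch {
    public static void main(String[] args) throws Exception {
      CommandLine cmd = new CommandLine("echo").addArgument("hello");
      DefaultExecutor executor = new DefaultExecutor();
      int exitValue = executor.execute(cmd);   // blocks until the process exits
      System.out.println("exit value: " + exitValue);
    }
  }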


[04/50] [abbrv] bigtop git commit: ODPI-182. Adding Hive CLI tests.

Posted by rv...@apache.org.
ODPI-182. Adding Hive CLI tests.

(cherry picked from commit 92b1299f560b5aaba613d2eb27a0c6cca6172f18)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/1b8079c0
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/1b8079c0
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/1b8079c0

Branch: refs/heads/master
Commit: 1b8079c04b682c8576b5115197d209c6c9ef4e20
Parents: f6ac233
Author: Raj Desai <rd...@us.ibm.com>
Authored: Mon Oct 24 17:00:31 2016 -0700
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:09 2017 -0700

----------------------------------------------------------------------
 .../org/odpi/specs/runtime/hive/HiveHelper.java | 101 +++++++++
 .../org/odpi/specs/runtime/hive/TestCLI.java    | 213 +++++++++++++++++++
 2 files changed, 314 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/1b8079c0/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
new file mode 100644
index 0000000..2ac9cc8
--- /dev/null
+++ b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hive;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.exec.CommandLine;
+import org.apache.commons.exec.DefaultExecuteResultHandler;
+import org.apache.commons.exec.DefaultExecutor;
+import org.apache.commons.exec.ExecuteException;
+import org.apache.commons.exec.ExecuteWatchdog;
+import org.apache.commons.exec.Executor;
+import org.apache.commons.exec.PumpStreamHandler;
+import org.apache.commons.exec.environment.EnvironmentUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+public class HiveHelper {
+	
+	private static final Log LOG = LogFactory.getLog(HiveHelper.class.getName());
+	
+	
+	public static Map<String, String> execCommand(CommandLine commandline) {
+		
+		System.out.println("Executing command:");
+		System.out.println(commandline.toString());
+		Map<String, String> env = null;
+		Map<String, String> entry = new HashMap<String, String>();
+		try {
+			env = EnvironmentUtils.getProcEnvironment();
+		} catch (IOException e1) {
+			// TODO Auto-generated catch block
+			e1.printStackTrace();
+		}
+
+		DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
+		ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+		PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream);
+		ExecuteWatchdog watchdog = new ExecuteWatchdog(60*10000);
+		Executor executor = new DefaultExecutor();
+		executor.setExitValue(1);
+		executor.setWatchdog(watchdog);
+		executor.setStreamHandler(streamHandler);
+		try {
+			executor.execute(commandline, env, resultHandler);
+		} catch (ExecuteException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		} catch (IOException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}
+		
+		try {
+			resultHandler.waitFor();
+			/*System.out.println("Command output: "+outputStream.toString());*/
+			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+			entry.put("outputStream", outputStream.toString());
+			return entry;
+		} catch (InterruptedException e) {
+			// TODO Auto-generated catch block
+			/*System.out.println("Command output: "+outputStream.toString());*/
+			LOG.debug("exitValue: "+ String.valueOf(resultHandler.getExitValue()));
+			LOG.debug("outputStream: "+ outputStream.toString());
+			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+			entry.put("outputStream", outputStream.toString());
+			e.printStackTrace();		
+			return entry;
+		}
+	}
+	
+	protected static String getProperty(String property, String description) {
+		String val = System.getProperty(property);
+		if (val == null) {
+			throw new RuntimeException("You must set the property " + property + " with " +
+				description);
+		}
+		LOG.debug(description + " is " + val);
+		return val;
+	 }
+	
+
+}
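
TestCLI below drives Hive through this helper. The returned map carries the process
exit code under "exitValue" and the captured stdout/stderr under "outputStream"; a
short usage sketch (it assumes a working hive binary on the PATH, and "default" is
just one database name that should normally appear in the output):

  import java.util.Map;

  import org.apache.commons.exec.CommandLine;

  // Sketch of how execCommand() results are consumed.
  public class HiveHelperUsage {
    public static void main(String[] args) {
      Map<String, String> results = HiveHelper.execCommand(
          new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES"));
      int exit = Integer.parseInt(results.get("exitValue"));
      boolean sawDefaultDb = results.get("outputStream").contains("default");
      System.out.println("exit=" + exit + ", saw default db=" + sawDefaultDb);
    }
  }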

http://git-wip-us.apache.org/repos/asf/bigtop/blob/1b8079c0/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
new file mode 100644
index 0000000..18ee81d
--- /dev/null
+++ b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
@@ -0,0 +1,213 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hive;
+
+import java.io.FileNotFoundException;
+import java.io.PrintWriter;
+import java.util.Map;
+
+import org.apache.commons.exec.CommandLine;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.AfterClass;
+import org.junit.Assert;
+
+public class TestCLI {
+	
+	static Map<String, String> results;
+	
+	@BeforeClass
+	public static void setup(){
+		
+		results = HiveHelper.execCommand(new CommandLine("which").addArgument("hive"));
+		Assert.assertEquals("Hive is not in the current path.", 0, Integer.parseInt(results.get("exitValue")));
+
+	}
+	
+	@Test
+	public void help(){		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-H"));
+		//LOG.info(results.get("exitValue"));
+		Assert.assertEquals("Error in executing 'hive -H'", 2, Integer.parseInt(results.get("exitValue")));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--help"));
+		Assert.assertEquals("Error in executing 'hive --help'", 0, Integer.parseInt(results.get("exitValue")));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-U"));
+		Assert.assertEquals("Unrecognized option should exit 1.", 1, Integer.parseInt(results.get("exitValue")));
+	}
+	 
+	@Test
+	public void sqlFromCmdLine(){
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES"));
+		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive"));
+			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive"));
+			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+		}
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+	}
+	
+	@Test
+	public void sqlFromFiles() throws FileNotFoundException{
+		try(PrintWriter out = new PrintWriter("hive-f1.sql")){ out.println("SHOW DATABASES;"); }
+		try(PrintWriter out = new PrintWriter("hive-f2.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("hive-f3.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("hive-f4.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); }
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f1.sql"));
+		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f2.sql"));
+			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f3.sql"));
+			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+		}
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f4.sql"));
+	}
+	
+	@Test
+	public void silent() {
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-S"));
+		Assert.assertEquals("-S option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--silent"));
+		Assert.assertEquals("--silent option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
+	}
+	
+	@Test
+	public void verbose(){
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-v"));
+		Assert.assertEquals("-v option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--verbose"));
+		Assert.assertEquals("--verbose option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));		
+	}
+	
+	@Test
+	public void initialization() throws FileNotFoundException{
+		try(PrintWriter out = new PrintWriter("hive-init1.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("hive-init2.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES"));
+		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init1.sql").addArgument("-e").addArgument("SHOW DATABASES"));
+			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", 0, Integer.parseInt(results.get("exitValue")));
+			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init2.sql").addArgument("-e").addArgument("SHOW DATABASES"));
+			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		}
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+	}
+	
+	@Test
+	public void database(){
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES"));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive"));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive"));
+		}
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive_1234").addArgument("-e").addArgument("CREATE TABLE odpi ( MYID INT );"));
+		Assert.assertEquals("Non-existent database returned with wrong exit code: "+Integer.parseInt(results.get("exitValue")), 88, Integer.parseInt(results.get("exitValue")));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("CREATE TABLE odpi ( MYID INT );"));
+		Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("DESCRIBE odpi"));
+		Assert.assertEquals("Failed to get expected column after creating odpi table using --database argument.", true, results.get("outputStream").contains("myid"));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("DROP TABLE odpi"));
+		Assert.assertEquals("Failed to drop table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+	}
+	
+	@Test
+	public void hiveConf(){
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES"));
+		Assert.assertEquals("The --hiveconf option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES"));
+		Assert.assertEquals("The -hiveconf variant option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
+	}
+	
+	@Test
+	public void variableSubsitution() throws FileNotFoundException{
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES"));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive"));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive"));
+		}
+		try(PrintWriter out = new PrintWriter("hive-define.sql")){ out.println("show ${A};"); out.println("quit;"); }
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive -d A=DATABASES < hive-define.sql", false));		
+		Assert.assertEquals("The hive -d A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+		Assert.assertEquals("The hive -d A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --define A=DATABASES < hive-define.sql", false));		
+		Assert.assertEquals("The hive --define A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+		Assert.assertEquals("The hive --define A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+	}
+	
+	@Test
+	public void hiveVar() throws FileNotFoundException{
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES"));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive"));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive"));
+		}
+		try(PrintWriter out = new PrintWriter("hive-var.sql")){ out.println("show ${A};"); out.println("quit;"); }
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hivevar A=DATABASES < hive-var.sql", false));		
+		Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+		Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		
+		try(PrintWriter out = new PrintWriter("hiveconf-var.sql")){ out.println("show ${hiveconf:A};"); out.println("quit;"); }
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hiveconf A=DATABASES < hiveconf-var.sql", false));		
+		Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+		Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+	}
+	
+	@AfterClass
+	public static void cleanup(){
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-f*.sql", false));
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-init*.sql", false));
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-define.sql", false));
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-var.sql", false));
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hiveconf-var.sql", false));
+	}
+	 
+}


[37/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
deleted file mode 100644
index f247841..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
+++ /dev/null
@@ -1,337 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.odpi.specs.runtime.hive;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.junit.Test;
-
-import java.sql.SQLException;
-import java.sql.Statement;
-
-// This does not test every option that Hive supports, but does try to touch the major
-// options, especially anything unique to Hive.  See each test for areas tested and not tested.
-public class TestSql extends JdbcConnector {
-  private static final Log LOG = LogFactory.getLog(TestSql.class.getName());
-
-  @Test
-  public void db() throws SQLException {
-    final String db1 = "odpi_sql_db1";
-    final String db2 = "odpi_sql_db2";
-    try (Statement stmt = conn.createStatement()) {
-      stmt.execute("drop database if exists " + db1 + " cascade");
-
-      // Simple create database
-      stmt.execute("create database " + db1);
-      stmt.execute("drop database " + db1);
-
-      stmt.execute("drop schema if exists " + db2 + " cascade");
-
-      String location = getProperty(LOCATION, "a writable directory in HDFS");
-
-      // All the bells and whistles
-      stmt.execute("create schema if not exists " + db2 + " comment 'a db' location '" + location +
-          "' with dbproperties ('a' = 'b')");
-
-      stmt.execute("alter database " + db2 + " set dbproperties ('c' = 'd')");
-
-      stmt.execute("drop database " + db2 + " restrict");
-    }
-  }
-
-  @Test
-  public void table() throws SQLException {
-    final String table1 = "odpi_sql_table1";
-    final String table2 = "odpi_sql_table2";
-    final String table3 = "odpi_sql_table3";
-    final String table4 = "odpi_sql_table4";
-    final String table5 = "odpi_sql_table5";
-
-    try (Statement stmt = conn.createStatement()) {
-      stmt.execute("drop table if exists " + table1);
-      stmt.execute("drop table if exists " + table2);
-      stmt.execute("drop table if exists " + table3);
-      stmt.execute("drop table if exists " + table4);
-      stmt.execute("drop table if exists " + table5);
-
-      String location = getProperty(LOCATION, "a writable directory in HDFS");
-      stmt.execute("create external table " + table1 + "(a int, b varchar(32)) location '" +
-          location + "'");
-
-      // With a little bit of everything, except partitions, we'll do those below
-      stmt.execute("create table if not exists " + table2 +
-          "(c1 tinyint," +
-          " c2 smallint," +
-          " c3 int comment 'a column comment'," +
-          " c4 bigint," +
-          " c5 float," +
-          " c6 double," +
-          " c7 decimal," +
-          " c8 decimal(12)," +
-          " c9 decimal(8,2)," +
-          " c10 timestamp," +
-          " c11 date," +
-          " c12 string," +
-          " c13 varchar(120)," +
-          " c14 char(10)," +
-          " c15 boolean," +
-          " c16 binary," +
-          " c17 array<string>," +
-          " c18 map <string, string>," +
-          " c19 struct<s1:int, s2:bigint>," +
-          " c20 uniontype<int, string>) " +
-          "comment 'table comment'" +
-          "clustered by (c1) sorted by (c2) into 10 buckets " +
-          "stored as orc " +
-          "tblproperties ('a' = 'b')");
-
-      // Not testing SKEWED BY, ROW FORMAT, STORED BY (storage handler
-
-      stmt.execute("create temporary table " + table3 + " like " + table2);
-
-      stmt.execute("insert into " + table1 + " values (3, 'abc'), (4, 'def')");
-
-      stmt.execute("create table " + table4 + " as select a, b from " + table1);
-
-      stmt.execute("truncate table " + table4);
-
-      stmt.execute("alter table " + table4 + " rename to " + table5);
-      stmt.execute("alter table " + table2 + " set tblproperties ('c' = 'd')");
-
-      // Not testing alter of clustered or sorted by, because that's suicidal
-      // Not testing alter of skewed or serde properties since we didn't test it for create
-      // above.
-
-      stmt.execute("drop table " + table1 + " purge");
-      stmt.execute("drop table " + table2);
-      stmt.execute("drop table " + table3);
-      stmt.execute("drop table " + table5);
-    }
-  }
-
-  @Test
-  public void partitionedTable() throws SQLException {
-    final String table1 = "odpi_sql_ptable1";
-    try (Statement stmt = conn.createStatement()) {
-      stmt.execute("drop table if exists " + table1);
-
-      stmt.execute("create table " + table1 +
-          "(c1 int," +
-          " c2 varchar(32))" +
-          "partitioned by (p1 string comment 'a partition column')" +
-          "stored as orc");
-
-      stmt.execute("alter table " + table1 + " add partition (p1 = 'a')");
-      stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (1, 'abc')");
-      stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (2, 'def')");
-      stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (3, 'ghi')");
-      stmt.execute("alter table " + table1 + " partition (p1 = 'a') concatenate");
-      stmt.execute("alter table " + table1 + " touch partition (p1 = 'a')");
-
-      stmt.execute("alter table " + table1 + " add columns (c3 float)");
-      stmt.execute("alter table " + table1 + " drop partition (p1 = 'a')");
-
-      // Not testing rename partition, exchange partition, msck repair, archive/unarchive,
-      // set location, enable/disable no_drop/offline, compact (because not everyone may have
-      // ACID on), change column
-
-      stmt.execute("drop table " + table1);
-
-    }
-  }
-
-  @Test
-  public void view() throws SQLException {
-    final String table1 = "odpi_sql_vtable1";
-    final String view1 = "odpi_sql_view1";
-    final String view2 = "odpi_sql_view2";
-    try (Statement stmt = conn.createStatement()) {
-      stmt.execute("drop table if exists " + table1);
-      stmt.execute("drop view if exists " + view1);
-      stmt.execute("drop view if exists " + view2);
-      stmt.execute("create table " + table1 + "(a int, b varchar(32))");
-      stmt.execute("create view " + view1 + " as select a from " + table1);
-
-      stmt.execute("create view if not exists " + view2 +
-          " comment 'a view comment' " +
-          "tblproperties ('a' = 'b') " +
-          "as select b from " + table1);
-
-      stmt.execute("alter view " + view1 + " as select a, b from " + table1);
-      stmt.execute("alter view " + view2 + " set tblproperties('c' = 'd')");
-
-      stmt.execute("drop view " + view1);
-      stmt.execute("drop view " + view2);
-    }
-  }
-
-  // Not testing indices because they are currently useless in Hive
-  // Not testing macros because as far as I know no one uses them
-
-  @Test
-  public void function() throws SQLException {
-    final String func1 = "odpi_sql_func1";
-    final String func2 = "odpi_sql_func2";
-    try (Statement stmt = conn.createStatement()) {
-      stmt.execute("create temporary function " + func1 +
-          " as 'org.apache.hadoop.hive.ql.udf.UDFToInteger'");
-      stmt.execute("drop temporary function " + func1);
-
-      stmt.execute("drop function if exists " + func2);
-
-      stmt.execute("create function " + func2 +
-          " as 'org.apache.hadoop.hive.ql.udf.UDFToInteger'");
-      stmt.execute("drop function " + func2);
-    }
-  }
-
-  // Not testing grant/revoke/roles as different vendors use different security solutions
-  // and hence different things will work here.
-
-  // This covers insert (non-partitioned, partitioned, dynamic partitions, overwrite, with
-  // values and select), and multi-insert.  Load is not tested as there's no guarantee that the
-  // test machine has access to HDFS and thus the ability to upload a file.
-  @Test
-  public void insert() throws SQLException {
-    final String table1 = "odpi_insert_table1";
-    final String table2 = "odpi_insert_table2";
-    try (Statement stmt = conn.createStatement()) {
-      stmt.execute("drop table if exists " + table1);
-      stmt.execute("create table " + table1 +
-          "(c1 tinyint," +
-          " c2 smallint," +
-          " c3 int," +
-          " c4 bigint," +
-          " c5 float," +
-          " c6 double," +
-          " c7 decimal(8,2)," +
-          " c8 varchar(120)," +
-          " c9 char(10)," +
-          " c10 boolean)" +
-          " partitioned by (p1 string)");
-
-      // insert with partition
-      stmt.execute("explain insert into " + table1 + " partition (p1 = 'a') values " +
-          "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true)," +
-          "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true)");
-
-      stmt.execute("set hive.exec.dynamic.partition.mode=nonstrict");
-
-      // dynamic partition
-      stmt.execute("explain insert into " + table1 + " partition (p1) values " +
-          "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true, 'b')," +
-          "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true, 'b')");
-
-      stmt.execute("drop table if exists " + table2);
-
-      stmt.execute("create table " + table2 +
-          "(c1 tinyint," +
-          " c2 smallint," +
-          " c3 int," +
-          " c4 bigint," +
-          " c5 float," +
-          " c6 double," +
-          " c7 decimal(8,2)," +
-          " c8 varchar(120)," +
-          " c9 char(10)," +
-          " c10 boolean)");
-
-      stmt.execute("explain insert into " + table2 + " values " +
-          "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true)," +
-          "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true)");
-
-      stmt.execute("explain insert overwrite table " + table2 + " select c1, c2, c3, c4, c5, c6, " +
-          "c7, c8, c9, c10 from " + table1);
-
-      // multi-insert
-      stmt.execute("from " + table1 +
-          " insert into table " + table1 + " partition (p1 = 'c') " +
-          " select c1, c2, c3, c4, c5, c6, c7, c8, c9, c10" +
-          " insert into table " + table2 + " select c1, c2, c3, c4, c5, c6, c7, c8, c9, c10");
-    }
-  }
-
-  // This tests CTEs
-  @Test
-  public void cte() throws SQLException {
-    final String table1 = "odpi_cte_table1";
-    try (Statement stmt = conn.createStatement()) {
-      stmt.execute("drop table if exists " + table1);
-      stmt.execute("create table " + table1 + "(c1 int, c2 varchar(32))");
-      stmt.execute("with cte1 as (select c1 from " + table1 + " where c1 < 10) " +
-          " select c1 from cte1");
-    }
-  }
-
-  // This tests select, including CTEs, all/distinct, single tables, joins (inner & outer),
-  // group by (w/ and w/o having), order by, cluster by/distribute by/sort by, limit, union,
-  // subqueries, and over.
-
-  @Test
-  public void select() throws SQLException {
-    final String[] tables = {"odpi_select_table1", "odpi_select_table2"};
-    try (Statement stmt = conn.createStatement()) {
-      for (int i = 0; i < tables.length; i++) {
-        stmt.execute("drop table if exists " + tables[i]);
-        stmt.execute("create table " + tables[i] + "(c1 int, c2 varchar(32))");
-      }
-
-      // single table queries tested above in several places
-
-      stmt.execute("explain select all a.c2, SUM(a.c1), SUM(b.c1) " +
-          "from " + tables[0] + " a join " + tables[1] + " b on (a.c2 = b.c2) " +
-          "group by a.c2 " +
-          "order by a.c2 asc " +
-          "limit 10");
-
-      stmt.execute("explain select distinct a.c2 " +
-          "from " + tables[0] + " a left outer join " + tables[1] + " b on (a.c2 = b.c2) " +
-          "order by a.c2 desc ");
-
-      stmt.execute("explain select a.c2, SUM(a.c1) " +
-          "from " + tables[0] + " a right outer join " + tables[1] + " b on (a.c2 = b.c2) " +
-          "group by a.c2 " +
-          "having SUM(b.c1) > 0 " +
-          "order by a.c2 ");
-
-      stmt.execute("explain select a.c2, rank() over (partition by a.c1) " +
-          "from " + tables[0] + " a full outer join " + tables[1] + " b on (a.c2 = b.c2) ");
-
-      stmt.execute("explain select c2 from " + tables[0] + " union all select c2 from " + tables[1]);
-
-      stmt.execute("explain select * from " + tables[0] + " distribute by c1 sort by c2");
-      stmt.execute("explain select * from " + tables[0] + " cluster by c1");
-
-      stmt.execute("explain select * from (select c1 from " + tables[0] + ") t");
-      stmt.execute("explain select * from " + tables[0] + " where c1 in (select c1 from " + tables[1] +
-          ")");
-
-    }
-
-  }
-
-  // Update and delete are not tested because not everyone configures their system to run
-  // with ACID.
-
-
-}
-
-
-
-
-

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
deleted file mode 100644
index 8e0abda..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
+++ /dev/null
@@ -1,251 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.odpi.specs.runtime.hive;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.IMetaStoreClient;
-import org.apache.hadoop.hive.metastore.TableType;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.SerDeInfo;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.thrift.TException;
-import org.junit.Assert;
-import org.junit.Assume;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Random;
-
-public class TestThrift {
-
-  private static final Log LOG = LogFactory.getLog(TestThrift.class.getName());
-
-  private static IMetaStoreClient client = null;
-  private static HiveConf conf;
-
-  private Random rand;
-
-  @BeforeClass
-  public static void connect() throws MetaException {
-    if (JdbcConnector.testActive(JdbcConnector.TEST_THRIFT, "Test Thrift ")) {
-      String url = JdbcConnector.getProperty(JdbcConnector.METASTORE_URL, "Thrift metastore URL");
-      conf = new HiveConf();
-      conf.setVar(HiveConf.ConfVars.METASTOREURIS, url);
-      LOG.info("Set to test against metastore at " + url);
-      client = new HiveMetaStoreClient(conf);
-    }
-  }
-
-  @Before
-  public void checkIfActive() {
-    Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_THRIFT, "Test Thrift "));
-    rand = new Random();
-  }
-
-  @Test
-  public void db() throws TException {
-    final String dbName = "odpi_thrift_db_" + rand.nextInt(Integer.MAX_VALUE);
-
-    Database db = new Database(dbName, "a db", null, new HashMap<String, String>());
-    client.createDatabase(db);
-    db = client.getDatabase(dbName);
-    Assert.assertNotNull(db);
-    db = new Database(db);
-    db.getParameters().put("a", "b");
-    client.alterDatabase(dbName, db);
-    List<String> alldbs = client.getDatabases("odpi_*");
-    Assert.assertNotNull(alldbs);
-    Assert.assertTrue(alldbs.size() > 0);
-    alldbs = client.getAllDatabases();
-    Assert.assertNotNull(alldbs);
-    Assert.assertTrue(alldbs.size() > 0);
-    client.dropDatabase(dbName, true, true);
-  }
-
-  // Not testing types calls, as they aren't used AFAIK
-
-  @Test
-  public void nonPartitionedTable() throws TException {
-    final String tableName = "odpi_thrift_table_" + rand.nextInt(Integer.MAX_VALUE);
-
-    // I don't test every operation related to tables, but only those that are frequently used.
-    SerDeInfo serde = new SerDeInfo("default_serde",
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
-    FieldSchema fs = new FieldSchema("a", "int", "no comment");
-    StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), null,
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT),
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), false, 0, serde, null, null,
-        new HashMap<String, String>());
-    Table table = new Table(tableName, "default", "me", 0, 0, 0, sd, null,
-        new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
-    client.createTable(table);
-
-    table = client.getTable("default", tableName);
-    Assert.assertNotNull(table);
-
-    List<Table> tables =
-        client.getTableObjectsByName("default", Collections.singletonList(tableName));
-    Assert.assertNotNull(tables);
-    Assert.assertEquals(1, tables.size());
-
-    List<String> tableNames = client.getTables("default", "odpi_*");
-    Assert.assertNotNull(tableNames);
-    Assert.assertTrue(tableNames.size() >= 1);
-
-    tableNames = client.getAllTables("default");
-    Assert.assertNotNull(tableNames);
-    Assert.assertTrue(tableNames.size() >= 1);
-
-    List<FieldSchema> cols = client.getFields("default", tableName);
-    Assert.assertNotNull(cols);
-    Assert.assertEquals(1, cols.size());
-
-    cols = client.getSchema("default", tableName);
-    Assert.assertNotNull(cols);
-    Assert.assertEquals(1, cols.size());
-
-    table = new Table(table);
-    table.getParameters().put("a", "b");
-    client.alter_table("default", tableName, table, false);
-
-    table.getParameters().put("c", "d");
-    client.alter_table("default", tableName, table);
-
-    client.dropTable("default", tableName, true, false);
-  }
-
-  @Test
-  public void partitionedTable() throws TException {
-    final String tableName = "odpi_thrift_partitioned_table_" + rand.nextInt(Integer.MAX_VALUE);
-
-    // I don't test every operation related to tables, but only those that are frequently used.
-    SerDeInfo serde = new SerDeInfo("default_serde",
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
-    FieldSchema fs = new FieldSchema("a", "int", "no comment");
-    StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), null,
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT),
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), false, 0, serde, null, null,
-        new HashMap<String, String>());
-    FieldSchema pk = new FieldSchema("pk", "string", "");
-    Table table = new Table(tableName, "default", "me", 0, 0, 0, sd, Collections.singletonList(pk),
-        new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
-    client.createTable(table);
-
-    sd = new StorageDescriptor(Collections.singletonList(fs), null,
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
-        new HashMap<String, String>());
-    Partition partition = new Partition(Collections.singletonList("x"), "default", tableName, 0,
-        0, sd, new HashMap<String, String>());
-    client.add_partition(partition);
-
-    List<Partition> partitions = new ArrayList<>(2);
-    sd = new StorageDescriptor(Collections.singletonList(fs), null,
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
-        new HashMap<String, String>());
-    partitions.add(new Partition(Collections.singletonList("y"), "default", tableName, 0,
-        0, sd, new HashMap<String, String>()));
-    sd = new StorageDescriptor(Collections.singletonList(fs), null,
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
-        new HashMap<String, String>());
-    partitions.add(new Partition(Collections.singletonList("z"), "default", tableName, 0,
-        0, sd, new HashMap<String, String>()));
-    client.add_partitions(partitions);
-
-    List<Partition> parts = client.listPartitions("default", tableName, (short)-1);
-    Assert.assertNotNull(parts);
-    Assert.assertEquals(3, parts.size());
-
-    parts = client.listPartitions("default", tableName, Collections.singletonList("x"),
-        (short)-1);
-    Assert.assertNotNull(parts);
-    Assert.assertEquals(1, parts.size());
-
-    parts = client.listPartitionsWithAuthInfo("default", tableName, (short)-1, "me",
-        Collections.<String>emptyList());
-    Assert.assertNotNull(parts);
-    Assert.assertEquals(3, parts.size());
-
-    List<String> partNames = client.listPartitionNames("default", tableName, (short)-1);
-    Assert.assertNotNull(partNames);
-    Assert.assertEquals(3, partNames.size());
-
-    parts = client.listPartitionsByFilter("default", tableName, "pk = \"x\"", (short)-1);
-    Assert.assertNotNull(parts);
-    Assert.assertEquals(1, parts.size());
-
-    parts = client.getPartitionsByNames("default", tableName, Collections.singletonList("pk=x"));
-    Assert.assertNotNull(parts);
-    Assert.assertEquals(1, parts.size());
-
-    partition = client.getPartition("default", tableName, Collections.singletonList("x"));
-    Assert.assertNotNull(partition);
-
-    partition = client.getPartition("default", tableName, "pk=x");
-    Assert.assertNotNull(partition);
-
-    partition = client.getPartitionWithAuthInfo("default", tableName, Collections.singletonList("x"),
-        "me", Collections.<String>emptyList());
-    Assert.assertNotNull(partition);
-
-    partition = new Partition(partition);
-    partition.getParameters().put("a", "b");
-    client.alter_partition("default", tableName, partition);
-
-    for (Partition p : parts) p.getParameters().put("c", "d");
-    client.alter_partitions("default", tableName, parts);
-
-    // Not testing get_partitions_by_expr because I don't want to hard code some byte sequence
-    // from the parser.  The odds that anyone other than the Hive parser would call this method
-    // seem low, since you'd have to exactly match the serialization of the Hive parser.
-
-    // Not testing partition marking events, not used by anyone but Hive replication AFAIK
-
-    client.dropPartition("default", tableName, "pk=x", true);
-    client.dropPartition("default", tableName, Collections.singletonList("y"), true);
-  }
-
-  // Not testing index calls, as no one uses indices
-
-
-  // Not sure if anyone uses stats calls or not.  Other query engines might.  Ignoring for now.
-
-  // Not sure if anyone else uses functions, though I'm guessing not, as without Hive classes they
-  // won't be runnable.
-
-  // Not testing authorization calls as AFAIK no one else uses Hive security
-
-  // Not testing transaction/locking calls, as those are used only by Hive.
-
-  // Not testing notification logging calls, as those are used only by Hive replication.
-
-}
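
For reference, the TestThrift class above talks to the metastore over its Thrift API with HiveMetaStoreClient rather than going through JDBC. A minimal connection sketch of that same pattern, assuming a metastore at thrift://localhost:9083 and a throw-away database name (both placeholders; the test itself reads the URI through JdbcConnector.getProperty):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Database;

import java.util.HashMap;

public class MetastoreSketch {
  public static void main(String[] args) throws Exception {
    // Assumed metastore URI; the test reads this from a system property instead.
    HiveConf conf = new HiveConf();
    conf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:9083");
    IMetaStoreClient client = new HiveMetaStoreClient(conf);
    try {
      // Round-trip a throw-away database, mirroring the db() test above.
      Database db = new Database("odpi_sketch_db", "a db", null, new HashMap<String, String>());
      client.createDatabase(db);
      System.out.println(client.getDatabase("odpi_sketch_db").getName());
      client.dropDatabase("odpi_sketch_db", true, true);
    } finally {
      client.close();
    }
  }
}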

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/python/find-public-apis.py
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/python/find-public-apis.py b/bigtop-tests/spec-tests/runtime/src/test/python/find-public-apis.py
deleted file mode 100755
index 091c496..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/python/find-public-apis.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-import os
-import re
-import warnings
-from optparse import OptionParser
-
-def main():
-  parser = OptionParser()
-  parser.add_option("-d", "--directory", help="Top level directory of source tree")
-  parser.add_option("-r", "--report", help="API compatibility report file, in HTML format")
-
-  (options, args) = parser.parse_args()
-
-  # Make sure both required options were supplied.
-  if options.directory == None:
-    print "You must specify a top level directory of the source tree"
-    return 1
-
-  if options.report == None:
-    print "You must specify the report to check against"
-    return 1
-
-  publicClasses = set()
-  for directory in os.walk(options.directory):
-    for afile in directory[2]:
-      if re.search("\.java$", afile) != None:
-        handle = open(os.path.join(directory[0], afile))
-        # Figure out the package we're in
-        pre = re.search("org/apache/hadoop[\w/]*", directory[0])
-        if pre == None:
-           warnings.warn("No package for " + directory[0])
-           continue
-        package = pre.group(0)
-        expecting = 0
-        for line in handle:
-          if re.search("@InterfaceAudience.Public", line) != None:
-            expecting = 1
-          classname = re.search("class (\w*)", line)
-          if classname != None and expecting == 1:
-            publicClasses.add(package + "/" + classname.group(1))
-            expecting = 0
-        handle.close()
-
-  handle = open(options.report)
-  haveChecked = set()
-  for line in handle:
-    classre = re.search("mangled: <b>(org/apache/hadoop[\w/]+)", line)
-    if classre != None:
-      classname = classre.group(1)
-      if classname not in haveChecked:
-        if classname in publicClasses:
-          print "Warning, found change in public class " + classname
-        haveChecked.add(classname)
-  handle.close()
-  
-
-
-
-main()
-
-      
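
The removed script's core idea is a regex scan of the source tree that records every class declared right after an @InterfaceAudience.Public annotation, followed by a pass over the compatibility report that warns when one of those classes shows up. A rough Java rendering of the first pass is sketched below purely as an illustration of the same heuristic; the package-path bookkeeping and the report-matching step are left out, and the source directory is assumed to arrive as the only command-line argument.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class PublicApiScanSketch {
  public static void main(String[] args) throws IOException {
    Pattern classDecl = Pattern.compile("class (\\w+)");
    Set<String> publicClasses = new HashSet<>();
    List<Path> sources;
    try (Stream<Path> walk = Files.walk(Paths.get(args[0]))) {
      sources = walk.filter(p -> p.toString().endsWith(".java")).collect(Collectors.toList());
    }
    for (Path source : sources) {
      boolean expecting = false;  // same flag the script carries between lines
      for (String line : Files.readAllLines(source)) {
        if (line.contains("@InterfaceAudience.Public")) expecting = true;
        Matcher m = classDecl.matcher(line);
        if (expecting && m.find()) {
          publicClasses.add(m.group(1));
          expecting = false;
        }
      }
    }
    publicClasses.forEach(System.out::println);
  }
}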


[27/50] [abbrv] bigtop git commit: Update hadoop-common.list (cherry picked from commit e36bfa4975c621ba7117b10d32b3d8411df4e481)

Posted by rv...@apache.org.
Update hadoop-common.list
(cherry picked from commit e36bfa4975c621ba7117b10d32b3d8411df4e481)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/c65c0008
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/c65c0008
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/c65c0008

Branch: refs/heads/master
Commit: c65c0008bd6e4577e6673bd2b6e6a86304f371ad
Parents: 933a770
Author: Raj Desai <rd...@us.ibm.com>
Authored: Wed Mar 8 15:56:53 2017 -0800
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:15 2017 -0700

----------------------------------------------------------------------
 .../spec-tests/runtime/src/test/resources/hadoop-common.list | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/c65c0008/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common.list
index 6a7f35c..73ff182 100644
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common.list
+++ b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common.list
@@ -74,7 +74,7 @@ client/xercesImpl[\.\-_].*jar
 client/commons-codec[\.\-_].*jar
 client/netty-3\.6\.2\.Final[\.\-_].*jar
 client/commons-collections[\.\-_].*jar
-client/httpcore-4\.2\.5[\.\-_].*jar
+client/httpcore-4\.[0-9]\.[0-9][\.\-_].*jar
 client/hadoop-mapreduce-client-jobclient[\.\-_].*jar
 client/htrace-core[\.\-_].*jar
 client/jersey-core-1\.9[\.\-_].*jar
@@ -139,7 +139,7 @@ client/protobuf-java-2\.5\.0[\.\-_].*jar
 client/hadoop-mapreduce-client-app-2\.7\.[0-9][\.\-_].*jar
 client/apacheds-kerberos-codec-2\.0\.0-M15[\.\-_].*jar
 client/commons-lang[\.\-_].*jar
-client/httpclient-4\.2\.5[\.\-_].*jar
+client/httpclient-4\.[0-9]\.[0-9][\.\-_].*jar
 client/paranamer[\.\-_].*jar
 client/hadoop-yarn-api[\.\-_].*jar
 client/jersey-client[\.\-_].*jar
@@ -176,7 +176,7 @@ lib/jetty-util-6\.1\.26[\.\-_].*jar
 lib/avro-1\.7\.[4-7][\.\-_].*jar
 lib/jaxb-impl-2\.2\.3-1[\.\-_].*jar
 lib/netty-3\.6\.2\.Final[\.\-_].*jar
-lib/httpcore-4\.2\.5[\.\-_].*jar
+lib/httpcore-4\.[0-9]\.[0-9][\.\-_].*jar
 lib/jsch-0\.1\.(4[2-9]|[5-9]\d)[\.\-_].*jar
 lib/jersey-core-1\.9[\.\-_].*jar
 lib/jackson-mapper-asl-1\.9\.13[\.\-_].*jar
@@ -219,7 +219,7 @@ lib/htrace-core-3\.1\.0-incubating[\.\-_].*jar
 lib/protobuf-java-2\.5\.0[\.\-_].*jar
 lib/apacheds-kerberos-codec-2\.0\.0-M15[\.\-_].*jar
 lib/java-xmlbuilder-0\.4[\.\-_].*jar
-lib/httpclient-4\.2\.5[\.\-_].*jar
+lib/httpclient-4\.[0-9]\.[0-9][\.\-_].*jar
 lib/guava-11\.0\.2[\.\-_].*jar
 lib/jsr305-3\.0\.0[\.\-_].*jar
 lib/jackson-jaxrs-1\.9\.13[\.\-_].*jar
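
The entries in hadoop-common.list appear to be regular expressions matched against the jar names shipped in the distribution, and this change relaxes the pinned httpcore/httpclient 4.2.5 entries so that any 4.x.y build is accepted. A quick stand-alone check of one updated entry, using plain java.util.regex; the harness that actually consumes the list is not shown in this diff, so this is only an illustration.

import java.util.regex.Pattern;

public class JarPatternCheck {
  public static void main(String[] args) {
    // Entry copied from hadoop-common.list after this change.
    Pattern entry = Pattern.compile("client/httpclient-4\\.[0-9]\\.[0-9][\\.\\-_].*jar");
    System.out.println(entry.matcher("client/httpclient-4.2.5.jar").matches()); // true: the previously pinned version still passes
    System.out.println(entry.matcher("client/httpclient-4.5.2.jar").matches()); // true: newer 4.x builds now pass as well
    System.out.println(entry.matcher("client/httpclient-3.1.jar").matches());   // false: other major versions are rejected
  }
}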


[20/50] [abbrv] bigtop git commit: Added shell scripts to make it easier to run, and resource files with expected results for ODPi 2.1.

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/4f19c159/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-client-core-2.7.3-api-report.json
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-client-core-2.7.3-api-report.json b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-client-core-2.7.3-api-report.json
new file mode 100644
index 0000000..6061c5e
--- /dev/null
+++ b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-client-core-2.7.3-api-report.json
@@ -0,0 +1 @@
+{"name":"hadoop-mapreduce-client-core","version":"2.7.3","classes":{"org.apache.hadoop.mapred.FixedLengthInputFormat":{"name":"org.apache.hadoop.mapred.FixedLengthInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void setRecordLength(org.apache.hadoop.conf.Configuration, int)":{"name":"setRecordLength","returnType":"void","args":["org.apache.hadoop.conf.Configuration","int"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"int getRecordLength(org.apache.hadoop.conf.Configu
 ration)":{"name":"getRecordLength","returnType":"int","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.CombineFileSplit":{"name":"org.apache.hadoop.mapred.lib.CombineFileSplit","methods":{"org.apache.hadoop.mapred.JobConf getJob()":{"name":"getJob","returnType":"org.apache.hadoop.mapred.JobConf","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorMapper":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) t
 hrows java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void map(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(org.apache.hadoop.io.Text, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["org.apache.hadoop.io.Text","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"except
 ions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.HashPartitioner":{"name":"org.apache.hadoop.mapred.lib.HashPartitioner","methods":{"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.OutputFormat":{"name":"org.apache.hadoop.mapreduce.OutputFormat","methods":{"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void checkOutputSpecs(org.apac
 he.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.CounterGroup":{"name":"org.apache.hadoop.mapreduce.CounterGroup","methods":{}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob","methods":{"org.apache.hadoop.mapred.jobcontrol.JobControl createValueAggregatorJobs([Ljava.lang.String;, [Ljava.lang.Clas
 s;) throws java.io.IOException":{"name":"createValueAggregatorJobs","returnType":"org.apache.hadoop.mapred.jobcontrol.JobControl","args":["[Ljava.lang.String;","[Ljava.lang.Class;"],"exceptions":["java.io.IOException"]},"void setAggregatorDescriptors(org.apache.hadoop.mapred.JobConf, [Ljava.lang.Class;)":{"name":"setAggregatorDescriptors","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","[Ljava.lang.Class;"],"exceptions":[]},"org.apache.hadoop.mapred.JobConf createValueAggregatorJob([Ljava.lang.String;, [Ljava.lang.Class;) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapred.JobConf","args":["[Ljava.lang.String;","[Ljava.lang.Class;"],"exceptions":["java.io.IOException"]},"void main([Ljava.lang.String;) throws java.io.IOException":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobConf createValueAggregatorJob([Ljava.lang.String;, [Ljava.la
 ng.Class;, java.lang.Class) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapred.JobConf","args":["[Ljava.lang.String;","[Ljava.lang.Class;","java.lang.Class"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.jobcontrol.JobControl createValueAggregatorJobs([Ljava.lang.String;) throws java.io.IOException":{"name":"createValueAggregatorJobs","returnType":"org.apache.hadoop.mapred.jobcontrol.JobControl","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobConf createValueAggregatorJob([Ljava.lang.String;) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapred.JobConf","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobConf createValueAggregatorJob([Ljava.lang.String;, java.lang.Class) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapred.J
 obConf","args":["[Ljava.lang.String;","java.lang.Class"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.InvalidInputException":{"name":"org.apache.hadoop.mapreduce.lib.input.InvalidInputException","methods":{"java.util.List getProblems()":{"name":"getProblems","returnType":"java.util.List","args":[],"exceptions":[]},"java.lang.String getMessage()":{"name":"getMessage","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.aggregate.UserDefinedValueAggregatorDescriptor":{"name":"org.apache.hadoop.mapred.lib.aggregate.UserDefinedValueAggregatorDescriptor","methods":{"java.lang.Object createInstance(java.lang.String)":{"name":"createInstance","returnType":"java.lang.Object","args":["java.lang.String"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.Counters$Counter":{"name
 ":"org.apache.hadoop.mapred.Counters$Counter","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void setDisplayName(java.lang.String)":{"name":"setDisplayName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"boolean contentEquals(org.apache.hadoop.mapred.Counters$Counter)":{"name":"contentEquals","returnType":"boolean","args":["org.apache.hadoop.mapred.Counters$Counter"],"exceptions":[]},"java.lang.String makeEscapedCompactString()":{"name":"makeEscapedCompactString","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"long getValue()":{"name":"getValue","returnType":"long","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.lang.String getName()":{"name"
 :"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter getUnderlyingCounter()":{"name":"getUnderlyingCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":[],"exceptions":[]},"void increment(long)":{"name":"increment","returnType":"void","args":["long"],"exceptions":[]},"void setValue(long)":{"name":"setValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getDisplayName()":{"name":"getDisplayName","returnType":"java.lang.String","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"long getCounter()":{"name":"getCounter","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.CombineFileRecordReaderWrapper":{"name":"org.apache.hadoop.mapred.lib.CombineFileRecordReaderWrapper","methods":{"long getPos() throws java.io.IOException"
 :{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.LongSumReducer":{"name":"org.apache.hadoop.mapred.lib.LongSumReducer","methods":{"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.had
 oop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.CombineFileSplit":{"name":"org.apache.hadoop.mapreduce.lib.input.CombineFileSplit","methods":{"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"long getLength(int)":{"name":"getLength","returnType":"long","args":["int"],"exceptions":[]},"long getLength()":{"name":"getLength","returnType":"long","args":[],"exceptions":[]},"[Lorg.apache.hadoop.fs.Path; getPaths()":{"name":"getPaths","returnType":"[Lorg.apache.hadoop.fs.Path;","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.
 DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"long getOffset(int)":{"name":"getOffset","returnType":"long","args":["int"],"exceptions":[]},"org.apache.hadoop.fs.Path getPath(int)":{"name":"getPath","returnType":"org.apache.hadoop.fs.Path","args":["int"],"exceptions":[]},"[J getLengths()":{"name":"getLengths","returnType":"[J","args":[],"exceptions":[]},"[J getStartOffsets()":{"name":"getStartOffsets","returnType":"[J","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"int getNumPaths()":{"name":"getNumPaths","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.db.DBConfiguration":{"name":"org.apache.hadoop.mapreduce.lib.db.DBConfiguration","methods":{"java.lang.String getInputQuery()":{"name":"getInputQuery","re
 turnType":"java.lang.String","args":[],"exceptions":[]},"void setInputClass(java.lang.Class)":{"name":"setInputClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setOutputFieldCount(int)":{"name":"setOutputFieldCount","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String getInputTableName()":{"name":"getInputTableName","returnType":"java.lang.String","args":[],"exceptions":[]},"[Ljava.lang.String; getInputFieldNames()":{"name":"getInputFieldNames","returnType":"[Ljava.lang.String;","args":[],"exceptions":[]},"void setOutputTableName(java.lang.String)":{"name":"setOutputTableName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.sql.Connection getConnection() throws java.sql.SQLException, java.lang.ClassNotFoundException":{"name":"getConnection","returnType":"java.sql.Connectio
 n","args":[],"exceptions":["java.sql.SQLException","java.lang.ClassNotFoundException"]},"java.lang.String getInputBoundingQuery()":{"name":"getInputBoundingQuery","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getInputOrderBy()":{"name":"getInputOrderBy","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.Class getInputClass()":{"name":"getInputClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"void setInputTableName(java.lang.String)":{"name":"setInputTableName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setInputCountQuery(java.lang.String)":{"name":"setInputCountQuery","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setInputOrderBy(java.lang.String)":{"name":"setInputOrderBy","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int getOutputFieldCount()":{"name":"getOutputFieldCount","returnType":"int","args":[],"exceptions":[]},"void setInputConditions(ja
 va.lang.String)":{"name":"setInputConditions","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setInputQuery(java.lang.String)":{"name":"setInputQuery","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getInputConditions()":{"name":"getInputConditions","returnType":"java.lang.String","args":[],"exceptions":[]},"void configureDB(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.String, java.lang.String, java.lang.String)":{"name":"configureDB","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String","java.lang.String","java.lang.String","java.lang.String"],"exceptions":[]},"void configureDB(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.String)":{"name":"configureDB","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String","java.lang.String"],"exceptions":[]},"void setInputBoundingQuery(java.lang.String)":{"name":"setInputBoundingQuery","
 returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setInputFieldNames([Ljava.lang.String;)":{"name":"setInputFieldNames","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":[]},"[Ljava.lang.String; getOutputFieldNames()":{"name":"getOutputFieldNames","returnType":"[Ljava.lang.String;","args":[],"exceptions":[]},"java.lang.String getOutputTableName()":{"name":"getOutputTableName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setOutputFieldNames([Ljava.lang.String;)":{"name":"setOutputFieldNames","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":[]},"java.lang.String getInputCountQuery()":{"name":"getInputCountQuery","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.Partitioner":{"name":"org.apache.hadoop.mapred.Partitioner","methods":{"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int
 "],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.output.FilterOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.FilterOutputFormat","methods":{"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOE
 xception":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer":{"name":"org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer","methods":{"void reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJobBase":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJobBase","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],
 "exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader","methods":{"java.lang.Object getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int findSeparator([B, int, int, byte)":{"name":"findSeparator","returnType":"int","args":["[B","int","int","byte"],"exceptions":[]},"java.lang.Class getKeyClass()":{"name":"getKeyClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"org.apache.hadoop.io.Text getCurrentValue()":{"name":"getCurrentValue","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType"
 :"float","args":[],"exceptions":["java.io.IOException"]},"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.Text getCurrentKey()":{"name":"getCurrentKey","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"boolean nextKeyValue() throws java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.io.IOException"]},"void setKeyValue(org.apache.hadoop.io.Text, org.apache.hadoop.io.Text, [B, int, int)":{"name":"setKeyValue","returnType":"void","args":["org.apache.hadoop.io.Text","org.apache.hadoop.io.Text","[B","int","int"],"exceptions":[]},"java.lang.Object getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","
 returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader","methods":{}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFilter":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFilter","methods":{"void setFilterClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setFilterClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":
 ["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.chain.ChainMapper":{"name":"org.apache.hadoop.mapreduce.lib.chain.ChainMapper","methods":{"void run(org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"run","returnType":"void","args":["org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void addMapper(org.apache.hadoop.mapreduce.Job, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"addMapper","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoo
 p.mapred.RecordReader":{"name":"org.apache.hadoop.mapred.RecordReader","methods":{"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorBaseDescriptor":{"name":"org.apache.had
 oop.mapred.lib.aggregate.ValueAggregatorBaseDescriptor","methods":{"org.apache.hadoop.mapred.lib.aggregate.ValueAggregator generateValueAggregator(java.lang.String)":{"name":"generateValueAggregator","returnType":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregator","args":["java.lang.String"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"java.util.Map$Entry generateEntry(java.lang.String, java.lang.String, org.apache.hadoop.io.Text)":{"name":"generateEntry","returnType":"java.util.Map$Entry","args":["java.lang.String","java.lang.String","org.apache.hadoop.io.Text"],"exceptions":[]}}},"org.apache.hadoop.mapred.FileOutputFormat":{"name":"org.apache.hadoop.mapred.FileOutputFormat","methods":{"void setOutputPath(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path)":{"name":"setOutputPath","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","o
 rg.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.Path getTaskOutputPath(org.apache.hadoop.mapred.JobConf, java.lang.String) throws java.io.IOException":{"name":"getTaskOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":["java.io.IOException"]},"void setOutputCompressorClass(org.apache.hadoop.mapred.JobConf, java.lang.Class)":{"name":"setOutputCompressorClass","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class"],"exceptions":[]},"java.lang.Class getOutputCompressorClass(org.apache.hadoop.mapred.JobConf, java.lang.Class)":{"name":"getOutputCompressorClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class"],"exceptions":[]},"void setCompressOutput(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setCompressOutput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"java.lang.Stri
 ng getUniqueName(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"getUniqueName","returnType":"java.lang.String","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"org.apache.hadoop.fs.Path getOutputPath(org.apache.hadoop.mapred.JobConf)":{"name":"getOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void checkOutputSpecs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf) throws org.apache.hadoop.mapred.InvalidJobConfException, java.io.IOException, org.apache.hadoop.mapred.FileAlreadyExistsException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf"],"exceptions":["org.apache.hadoop.mapred.InvalidJobConfException","java.io.IOException","org.apache.hadoop.mapred.FileAlreadyExistsException"]},"org.apache.hadoop.fs.Path getPathForCustomFile(org.apache.hadoop.mapred.JobConf, java.lang.String)"
 :{"name":"getPathForCustomFile","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"void setWorkOutputPath(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path)":{"name":"setWorkOutputPath","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.fs.Path"],"exceptions":[]},"boolean getCompressOutput(org.apache.hadoop.mapred.JobConf)":{"name":"getCompressOutput","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.fs.Path getWorkOutputPath(org.apache.hadoop.mapred.JobConf)":{"name":"getWorkOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","retur
 nType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader","methods":{"void accept(org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader$JoinCollector, org.apache.hadoop.io.WritableComparable) throws java.lang.InterruptedException, java.io.IOException":{"name":"accept","returnType":"void","args":["org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader$JoinCollector","org.apache.hadoop.io.WritableComparable"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int id()":{"name":"id","returnType":"int","args":[],"exceptions":[]},"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException
 , java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void skip(org.apache.hadoop.io.WritableComparable) throws java.lang.InterruptedException, java.io.IOException":{"name":"skip","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int compareTo(org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader"],"exceptions":[]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"org.apache.hadoop.io.WritableComparable key()":{"name":"key","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},
 "float getProgress() throws java.lang.InterruptedException, java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void add(org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader) throws java.lang.In
 terruptedException, java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.WritableComparable getCurrentKey()":{"name":"getCurrentKey","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"java.lang.Object getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void key(org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"key","returnTyp
 e":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.join.JoinRecordReader":{"name":"org.apache.hadoop.mapred.join.JoinRecordReader","methods":{"org.apache.hadoop.mapred.join.TupleWritable createValue()":{"name":"createValue","returnType":"org.apache.hadoop.mapred.join.TupleWritable","args":[],"exceptions":[]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.mapred.join.TupleWritable) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.mapred.join.TupleWritable"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java
 .lang.Object"],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat","methods":{"void setOutputFormatClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setOutputFormatClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void checkOutputSpecs(org.apac
 he.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.join.OuterJoinRecordReader":{"name":"org.apache.hadoop.mapred.join.OuterJoinRecordReader","methods":{}},"org.apache.hadoop.mapred.TextOutputFormat":{"name":"org.apache.hadoop.mapred.TextOutputFormat","methods":{"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf,
  java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.pipes.Submitter":{"name":"org.apache.hadoop.mapred.pipes.Submitter","methods":{"boolean getKeepCommandFile(org.apache.hadoop.mapred.JobConf)":{"name":"getKeepCommandFile","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.RunningJob jobSubmit(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"jobSubmit","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"void setIsJavaMapper(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setIsJavaMapper","returnType":"void","arg
 s":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"void setIsJavaRecordWriter(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setIsJavaRecordWriter","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"void main([Ljava.lang.String;) throws java.lang.Exception":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"boolean getIsJavaRecordWriter(org.apache.hadoop.mapred.JobConf)":{"name":"getIsJavaRecordWriter","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"boolean getIsJavaReducer(org.apache.hadoop.mapred.JobConf)":{"name":"getIsJavaReducer","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.RunningJob submitJob(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"submitJob","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.Job
 Conf"],"exceptions":["java.io.IOException"]},"void setIsJavaRecordReader(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setIsJavaRecordReader","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"int run([Ljava.lang.String;) throws java.lang.Exception":{"name":"run","returnType":"int","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"java.lang.String getExecutable(org.apache.hadoop.mapred.JobConf)":{"name":"getExecutable","returnType":"java.lang.String","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void setKeepCommandFile(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setKeepCommandFile","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"void setIsJavaReducer(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setIsJavaReducer","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"void setExecutable(org.apache.hadoop.m
 apred.JobConf, java.lang.String)":{"name":"setExecutable","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapred.RunningJob runJob(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"runJob","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"boolean getIsJavaMapper(org.apache.hadoop.mapred.JobConf)":{"name":"getIsJavaMapper","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"boolean getIsJavaRecordReader(org.apache.hadoop.mapred.JobConf)":{"name":"getIsJavaRecordReader","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.CombineFileInputFormat":{"name":"org.apache.hadoop.mapred.lib.CombineFileInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache
 .hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.InputSplit; getSplits(org.apache.hadoop.mapred.JobConf, int) throws java.io.IOException":{"name":"getSplits","returnType":"[Lorg.apache.hadoop.mapred.InputSplit;","args":["org.apache.hadoop.mapred.JobConf","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"
 ]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorDescriptor":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorDescriptor","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.ClusterStatus":{"name":"org.apache.hadoop.mapred.ClusterStatus","methods":{"int getTaskTrackers()":{"name":"getTaskTrackers","returnType":"int","args":[],"exceptions":[]},"int getMaxMapTasks()":{"name":"getMaxMapTasks","returnType":"int","args":[],"exceptions":[]},"long getMaxMemory()":{"name":"getMaxMemory","returnType":"long","args":[],"exceptions":[]},"int getMaxReduceTasks()":{"name":"getMaxReduceTasks","returnType":"int","args":[],"exceptions":[]},"java.util.Collection getGraylistedTrackerNames()":{"name":"getGraylistedTrackerNames","returnType":"java.util.Collection","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.Cluster$JobTrackerStatu
 s getJobTrackerStatus()":{"name":"getJobTrackerStatus","returnType":"org.apache.hadoop.mapreduce.Cluster$JobTrackerStatus","args":[],"exceptions":[]},"int getReduceTasks()":{"name":"getReduceTasks","returnType":"int","args":[],"exceptions":[]},"int getGraylistedTrackers()":{"name":"getGraylistedTrackers","returnType":"int","args":[],"exceptions":[]},"long getTTExpiryInterval()":{"name":"getTTExpiryInterval","returnType":"long","args":[],"exceptions":[]},"long getUsedMemory()":{"name":"getUsedMemory","returnType":"long","args":[],"exceptions":[]},"java.util.Collection getActiveTrackerNames()":{"name":"getActiveTrackerNames","returnType":"java.util.Collection","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int getMapTasks()":{"name":"getMapTasks","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobTracker$State getJobTr
 ackerState()":{"name":"getJobTrackerState","returnType":"org.apache.hadoop.mapred.JobTracker$State","args":[],"exceptions":[]},"int getBlacklistedTrackers()":{"name":"getBlacklistedTrackers","returnType":"int","args":[],"exceptions":[]},"java.util.Collection getBlacklistedTrackerNames()":{"name":"getBlacklistedTrackerNames","returnType":"java.util.Collection","args":[],"exceptions":[]},"java.util.Collection getBlackListedTrackersInfo()":{"name":"getBlackListedTrackersInfo","returnType":"java.util.Collection","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"int getNumExcludedNodes()":{"name":"getNumExcludedNodes","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.MapReduceBase":{"name":"org.apache.hadoop.mapred.MapReduceBase","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","retur
 nType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.join.TupleWritable":{"name":"org.apache.hadoop.mapred.join.TupleWritable","methods":{}},"org.apache.hadoop.mapred.ID":{"name":"org.apache.hadoop.mapred.ID","methods":{}},"org.apache.hadoop.mapred.lib.RegexMapper":{"name":"org.apache.hadoop.mapred.lib.RegexMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void map(java.lang.Object, org.apache.hadoop.io.Text, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.
 io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","org.apache.hadoop.io.Text","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregator":{"name":"org.ap
 ache.hadoop.mapred.lib.aggregate.ValueAggregator","methods":{}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorBaseDescriptor":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorBaseDescriptor","methods":{"java.util.ArrayList generateKeyValPairs(java.lang.Object, java.lang.Object)":{"name":"generateKeyValPairs","returnType":"java.util.ArrayList","args":["java.lang.Object","java.lang.Object"],"exceptions":[]},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregator generateValueAggregator(java.lang.String, long)":{"name":"generateValueAggregator","returnType":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregator","args":["java.lang.String","long"],"exceptions":[]},"void configure(org.apache.hadoop.conf.Configuration)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"java.util.Map$Entry generateEntry(java.lang.String, java.lang.String, org.apache.hadoop.io.Text)":{"name":"generateEntry","returnTy
 pe":"java.util.Map$Entry","args":["java.lang.String","java.lang.String","org.apache.hadoop.io.Text"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.DoubleValueSum":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.DoubleValueSum","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"void addNextValue(double)":{"name":"addNextValue","returnType":"void","args":["double"],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"double getSum()":{"name":"getSum","returnType":"double","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.LongValueMin":{"name":"org.apache.hadoop.mapreduce.lib.aggr
 egate.LongValueMin","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"long getVal()":{"name":"getVal","returnType":"long","args":[],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"void addNextValue(long)":{"name":"addNextValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.InputSplit":{"name":"org.apache.hadoop.mapreduce.InputSplit","methods":{"[Lorg.apache.hadoop.mapred.SplitLocationInfo; getLocationInfo() throws java.io.IOException":{"name":"getLocationInfo","returnType":"[Lorg.apache.hadoop.mapred.SplitLocationInfo;","args":[],"exceptions":["java.io.IOException"]},"[Ljava
 .lang.String; getLocations() throws java.lang.InterruptedException, java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"long getLength() throws java.lang.InterruptedException, java.io.IOException":{"name":"getLength","returnType":"long","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.TotalOrderPartitioner":{"name":"org.apache.hadoop.mapred.lib.TotalOrderPartitioner","methods":{"void setPartitionFile(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path)":{"name":"setPartitionFile","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.fs.Path"],"exceptions":[]},"java.lang.String getPartitionFile(org.apache.hadoop.mapred.JobConf)":{"name":"getPartitionFile","returnType":"java.lang.String","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void configure(org.apache.hadoo
 p.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.Counter":{"name":"org.apache.hadoop.mapreduce.Counter","methods":{"void setDisplayName(java.lang.String)":{"name":"setDisplayName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"long getValue()":{"name":"getValue","returnType":"long","args":[],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter getUnderlyingCounter()":{"name":"getUnderlyingCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":[],"exceptions":[]},"void increment(long)":{"name":"increment","returnType":"void","args":["long"],"exceptions":[]},"void setValue(long)":{"name":"setValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getDisplayName()":{"name":"getDisplayName","returnType":"java.lang.String","args":
 [],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.db.DBInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.db.DBInputFormat","methods":{"org.apache.hadoop.mapreduce.lib.db.DBConfiguration getDBConf()":{"name":"getDBConf","returnType":"org.apache.hadoop.mapreduce.lib.db.DBConfiguration","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setC
 onf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"java.sql.Connection createConnection()":{"name":"createConnection","returnType":"java.sql.Connection","args":[],"exceptions":[]},"void setInput(org.apache.hadoop.mapreduce.Job, java.lang.Class, java.lang.String, java.lang.String, java.lang.String, [Ljava.lang.String;)":{"name":"setInput","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class","java.lang.String","java.lang.String","java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"java.sql.Connection getConnection()":{"name":"getConnection","returnType":"java.sql.Connection","args":[],"exceptions":[]},"java.lang.String getDBProductName()":{"name":"getDBProductName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setInput(org.apache.hadoop.mapreduce.Job, java.lang.Class, java.lang.String, java.lang.String)":{"name":"setInput","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lan
 g.Class","java.lang.String","java.lang.String"],"exceptions":[]},"java.util.List getSplits(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"getSplits","returnType":"java.util.List","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.StreamBackedIterator":{"name":"org.apache.hadoop.mapreduce.lib.join.StreamBackedIterator","methods":{"void add(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean next(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"boolean replay(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"replay","returnType":"boo
 lean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.join.CompositeInputFormat":{"name":"org.apache.hadoop.mapred.join.CompositeInputFormat","methods":{"org.apache.hadoop.mapred.join.ComposableRecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.join.ComposableRecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"java.lang.String compose(java.lang.Str
 ing, java.lang.Class, [Lorg.apache.hadoop.fs.Path;)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.String","java.lang.Class","[Lorg.apache.hadoop.fs.Path;"],"exceptions":[]},"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void setFormat(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"setFormat","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"java.lang.String compose(java.lang.Class, java.lang.String)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.Class","java.lang.String"],"exceptions":[]},"
 java.lang.String compose(java.lang.String, java.lang.Class, [Ljava.lang.String;)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.String","java.lang.Class","[Ljava.lang.String;"],"exceptions":[]},"[Lorg.apache.hadoop.mapred.InputSplit; getSplits(org.apache.hadoop.mapred.JobConf, int) throws java.io.IOException":{"name":"getSplits","returnType":"[Lorg.apache.hadoop.mapred.InputSplit;","args":["org.apache.hadoop.mapred.JobConf","int"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.Partitioner":{"name":"org.apache.hadoop.mapreduce.Partitioner","methods":{"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.output.NullOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.NullOutputFormat","methods":{"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext)":{"name":"checkOutputSpe
 cs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext)":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":[]},"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext)":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer":{"name":"org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer","methods":{"void reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.lang.Iter
 able","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.TaskCompletionEvent$Status":{"name":"org.apache.hadoop.mapred.TaskCompletionEvent$Status","methods":{"[Lorg.apache.hadoop.mapred.TaskCompletionEvent$Status; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.mapred.TaskCompletionEvent$Status;","args":[],"exceptions":[]},"org.apache.hadoop.mapred.TaskCompletionEvent$Status valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.mapred.TaskCompletionEvent$Status","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.mapred.JobContext":{"name":"org.apache.hadoop.mapred.JobContext","methods":{"org.apache.hadoop.util.Progressable getProgressible()":{"name":"getProgressible","returnType":"org.apache.hadoop.util.Progressable","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobConf getJobConf()":{"name":"getJobConf","returnType":"org.apache.had
 oop.mapred.JobConf","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.OutputCommitter":{"name":"org.apache.hadoop.mapreduce.OutputCommitter","methods":{"boolean isCommitJobRepeatable(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"isCommitJobRepeatable","returnType":"boolean","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void commitJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"commitJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void abortTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"abortTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"isRecoverySupported","returnType":"boolean","args
 ":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"boolean needsTaskCommit(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"needsTaskCommit","returnType":"boolean","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void setupJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"setupJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void recoverTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"recoverTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void cleanupJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"cleanupJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void abortJ
 ob(org.apache.hadoop.mapreduce.JobContext, org.apache.hadoop.mapreduce.JobStatus$State) throws java.io.IOException":{"name":"abortJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext","org.apache.hadoop.mapreduce.JobStatus$State"],"exceptions":["java.io.IOException"]},"void setupTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"setupTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void commitTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"commitTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported()":{"name":"isRecoverySupported","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueHistogram":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueHistogram","methods":{}},"o
 rg.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.CombineSequenceFileInputFormat":{"name":"org.apache.hadoop.mapred.lib.CombineSequenceFileInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputS
 plit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.InputSplit":{"name":"org.apache.hadoop.mapred.InputSplit","methods":{"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"long getLength() throws java.io.IOException":{"name":"getLength","returnType":"long","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.aggregate.LongValueSum":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.LongValueSum","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"void addNextValue(long)":{"name":"addNextValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getReport()":{"
 name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]},"long getSum()":{"name":"getSum","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileRecordReader","methods":{"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentValue()":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"float getProgress() throws java.io.IOException":{"name":"getProg
 ress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentKey()":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.MultipleTextOutputFormat":{"name":"org.apache.hadoop.mapred.lib.MultipleTextOutputFormat","methods":{}},"org.apache.hadoop.mapreduce.lib.aggregate.StringValueMin":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.StringValueMin","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"java.lang.String getVal()":{"name":"getVal","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.A
 rrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.OverrideRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.OverrideRecordReader","methods":{"org.apache.hadoop.io.Writable createValue()":{"name":"createValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorJob":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorJob","methods":{"org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl createValueAggregatorJobs([Ljava.lang.String;) throws java.io.IOException":{"name":"createValueAggregatorJobs","returnType":"org.apache.hadoop.mapreduce.lib.j
 obcontrol.JobControl","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl createValueAggregatorJobs([Ljava.lang.String;, [Ljava.lang.Class;) throws java.io.IOException":{"name":"createValueAggregatorJobs","returnType":"org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl","args":["[Ljava.lang.String;","[Ljava.lang.Class;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.conf.Configuration setAggregatorDescriptors([Ljava.lang.Class;)":{"name":"setAggregatorDescriptors","returnType":"org.apache.hadoop.conf.Configuration","args":["[Ljava.lang.Class;"],"exceptions":[]},"void main([Ljava.lang.String;) throws java.lang.InterruptedException, java.io.IOException, java.lang.ClassNotFoundException":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.InterruptedException","java.io.IOException","java.lang.ClassNotFoundException"]},"org.apache.hadoop.mapreduce.Job createValueA
 ggregatorJob([Ljava.lang.String;, [Ljava.lang.Class;) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapreduce.Job","args":["[Ljava.lang.String;","[Ljava.lang.Class;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.Job createValueAggregatorJob(org.apache.hadoop.conf.Configuration, [Ljava.lang.String;) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapreduce.Job","args":["org.apache.hadoop.conf.Configuration","[Ljava.lang.String;"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.LongValueMin":{"name":"org.apache.hadoop.mapred.lib.aggregate.LongValueMin","methods":{}},"org.apache.hadoop.mapred.lib.aggregate.LongValueSum":{"name":"org.apache.hadoop.mapred.lib.aggregate.LongValueSum","methods":{}},"org.apache.hadoop.mapred.JobID":{"name":"org.apache.hadoop.mapred.JobID","methods":{"java.lang.String getJobIDsPattern(java.lang.String, java.lan
 g.Integer)":{"name":"getJobIDsPattern","returnType":"java.lang.String","args":["java.lang.String","java.lang.Integer"],"exceptions":[]},"org.apache.hadoop.mapred.JobID forName(java.lang.String) throws java.lang.IllegalArgumentException":{"name":"forName","returnType":"org.apache.hadoop.mapred.JobID","args":["java.lang.String"],"exceptions":["java.lang.IllegalArgumentException"]},"org.apache.hadoop.mapred.JobID read(java.io.DataInput) throws java.io.IOException":{"name":"read","returnType":"org.apache.hadoop.mapred.JobID","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobID downgrade(org.apache.hadoop.mapreduce.JobID)":{"name":"downgrade","returnType":"org.apache.hadoop.mapred.JobID","args":["org.apache.hadoop.mapreduce.JobID"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.FileSplit":{"name":"org.apache.hadoop.mapreduce.lib.input.FileSplit","methods":{"long getStart()":{"name":"getStart","returnType":"long","args":[],"exceptio
 ns":[]},"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.SplitLocationInfo; getLocationInfo() throws java.io.IOException":{"name":"getLocationInfo","returnType":"[Lorg.apache.hadoop.mapred.SplitLocationInfo;","args":[],"exceptions":["java.io.IOException"]},"long getLength()":{"name":"getLength","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getPath()":{"name":"getPath","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","arg
 s":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionReducer":{"name":"org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionReducer","methods":{"void reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void reduce(org.apache.hadoop.io.Text, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["org.apache.hadoop.io.Text","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setup(org.apache.hadoop.mapreduce.Reducer
 $Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"setup","returnType":"void","args":["org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.StringValueMax":{"name":"org.apache.hadoop.mapred.lib.aggregate.StringValueMax","methods":{}},"org.apache.hadoop.mapreduce.lib.output.SequenceFileAsBinaryOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.SequenceFileAsBinaryOutputFormat","methods":{"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void setSequenceFileOutputKeyClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setSequenceFileOutputKeyClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"org.apache
 .hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"java.lang.Class getSequenceFileOutputValueClass(org.apache.hadoop.mapreduce.JobContext)":{"name":"getSequenceFileOutputValueClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void setSequenceFileOutputValueClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setSequenceFileOutputValueClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"java.lang.Class getSequenceFileOutputKeyClass(org.apache.hadoop.mapreduce.JobContext)":{"name":"getSequenceFileOutputKeyClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]}}},"org.apac
 he.hadoop.mapred.Reducer":{"name":"org.apache.hadoop.mapred.Reducer","methods":{"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.jobcontrol.JobControl":{"name":"org.apache.hadoop.mapred.jobcontrol.JobControl","methods":{"java.util.ArrayList getReadyJobs()":{"name":"getReadyJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.util.ArrayList getFailedJobs()":{"name":"getFailedJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.util.ArrayList getSuccessfulJobs()":{"name":"getSuccessfulJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.util.ArrayList getWaitingJobs()":{"name":"getWaitingJob
 s","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.util.ArrayList getRunningJobs()":{"name":"getRunningJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"void addJobs(java.util.Collection)":{"name":"addJobs","returnType":"void","args":["java.util.Collection"],"exceptions":[]},"int getState()":{"name":"getState","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.ResetableIterator":{"name":"org.apache.hadoop.mapreduce.lib.join.ResetableIterator","methods":{"void add(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean next(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"boolean replay
 (org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"replay","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorMapper":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void map(org.apache.hadoo
 p.io.WritableComparable, org.apache.hadoop.io.Writable, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"map","returnType":"void","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setup(org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"setup","returnType":"void","args":["org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.join.CompositeInputSplit":{"name":"org.apache.hadoop.mapred.join.CompositeInputSplit","methods":{"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; getLocation
 (int) throws java.io.IOException":{"name":"getLocation","returnType":"[Ljava.lang.String;","args":["int"],"exceptions":["java.io.IOException"]},"void add(org.apache.hadoop.mapred.InputSplit) throws java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.mapred.InputSplit"],"exceptions":["java.io.IOException"]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"long getLength() throws java.io.IOException":{"name":"getLength","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.InputSplit get(int)":{"name":"get","returnType":"org.apache.hadoop.mapred.InputSplit","args":["int"],"exceptions":[]},"long getLength(int) throws java.io.IOException":{"name":"getLength","returnType":"long","args":["int"],"exceptions":["java.io.IOException"]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"re
 adFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.BinaryPartitioner":{"name":"org.apache.hadoop.mapred.lib.BinaryPartitioner","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.partition.KeyFieldBasedComparator":{"name":"org.apache.hadoop.mapreduce.lib.partition.KeyFieldBasedComparator","methods":{"void setKeyFieldComparatorOptions(org.apache.hadoop.mapreduce.Job, java.lang.String)":{"name":"setKeyFieldComparatorOptions","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.String"],"exceptions":[]},"int compare([B, int, int, [B, int, int)":{"name":"compare","returnType":"int","args":["[B","int","int","[B","int","int"],"exceptions":[]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Conf
 iguration","args":[],"exceptions":[]},"java.lang.String getKeyFieldComparatorOption(org.apache.hadoop.mapreduce.JobContext)":{"name":"getKeyFieldComparatorOption","returnType":"java.lang.String","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoo
 p.mapred.MultiFileSplit":{"name":"org.apache.hadoop.mapred.MultiFileSplit","methods":{"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.JobQueueInfo":{"name":"org.apache.hadoop.mapred.JobQueueInfo","methods":{"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setJobStatuses([Lorg.apache.hadoop.mapreduce.JobStatus;)":{"name":"setJobStatuses","returnType":"void","args":["[Lorg.apache.hadoop.mapreduce.JobStatus;"],"exceptions":[]},"void setChildren(java.util.List)":{"name":"setChildren","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.List getChildren()":{"name":"getChildren","returnType":"java.util.List","args":[],"exceptions":[]
 },"void setQueueState(java.lang.String)":{"name":"setQueueState","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getQueueState()":{"name":"getQueueState","returnType":"java.lang.String","args":[],"exceptions":[]},"void setSchedulingInfo(java.lang.String)":{"name":"setSchedulingInfo","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setProperties(java.util.Properties)":{"name":"setProperties","returnType":"void","args":["java.util.Properties"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.db.DBOutputFormat":{"name":"org.apache.hadoop.mapred.lib.db.DBOutputFormat","methods":{"void setOutput(org.apache.hadoop.mapred.JobConf, java.lang.String, [Ljava.lang.String;)":{"name":"setOutput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"void checkOutputSpecs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":
 "checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"void setOutput(org.apache.hadoop.mapred.JobConf, java.lang.String, int)":{"name":"setOutput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String","int"],"exceptions":[]},"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.RecordWriter":{"name":"org.apache.hadoop.mapred.RecordWriter","methods":{"void close(org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"
 close","returnType":"void","args":["org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void write(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.FileAlreadyExistsException":{"name":"org.apache.hadoop.mapred.FileAlreadyExistsException","methods":{}},"org.apache.hadoop.mapreduce.lib.join.JoinRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.JoinRecordReader","methods":{"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.lib.join.TupleWritable createValue()":{"name":"createValue","returnType":"org.apache.hadoop.mapreduce.lib.join.TupleWritable","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable cre
 ateValue()":{"name":"createValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.TupleWritable":{"name":"org.apache.hadoop.mapreduce.lib.join.TupleWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int size()":{"name":"size","returnType":"int","args":[],"exceptions":[]},"java.util.Iterator iterator()":{"name":"iterator","returnType":"java.util.Iterator","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable get(int)":{"name":"get","returnType":"org.apache.hadoop.io.Writable","args":["int"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lan
 g.Object"],"exceptions":[]},"boolean has(int)":{"name":"has","returnType":"boolean","args":["int"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.CombineTextInputFormat":{"name":"org.apache.hadoop.mapred.lib.CombineTextInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.IdentityReducer":{"name":"org.apache.hadoop.mapred.lib.IdentityReducer","methods":{"void reduce(java.lang.Object, java.util.
 Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.TaskID":{"name":"org.apache.hadoop.mapreduce.TaskID","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.mapreduce.ID)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapreduce.ID"],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskType getTaskType(char)":{"name":"getTaskType","returnType":"org.apache.hadoop.mapreduce.TaskType","args":["char"],"exceptions":[]},"boolean isMap()":{"name":"isMap","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getAllTaskTypes()":{"name":"getAllTaskTypes","returnType":"java.lang.String","args":[],"ex
 ceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"char getRepresentingCharacter(org.apache.hadoop.mapreduce.TaskType)":{"name":"getRepresentingCharacter","returnType":"char","args":["org.apache.hadoop.mapreduce.TaskType"],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskID forName(java.lang.String) throws java.lang.IllegalArgumentException":{"name":"forName","returnType":"org.apache.hadoop.mapreduce.TaskID","args":["java.lang.String"],"exceptions":["java.lang.IllegalArgumentException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.
 JobID getJobID()":{"name":"getJobID","returnType":"org.apache.hadoop.mapreduce.JobID","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskType getTaskType()":{"name":"getTaskType","returnType":"org.apache.hadoop.mapreduce.TaskType","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.filecache.DistributedCache":{"name":"org.apache.hadoop.filecache.DistributedCache","methods":{"void setLocalArchives(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"setLocalArchives","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"long getTimestamp(org.apache.hadoop.conf.Configuration, java.net.URI) throws java.io.IOException":{"name":"getTimestamp","returnType":"long","args":["org.apache.hadoop.conf.Configuration","java.net.URI"],"exceptions":["java.io.IOEx
 ception"]},"void setFileTimestamps(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"setFileTimestamps","r

<TRUNCATED>

[28/50] [abbrv] bigtop git commit: Added tests into TestSpecsRuntime and testRuntimeSpecConf so they get run. Tested against an HDP cluster.

Posted by rv...@apache.org.
Added tests into TestSpecsRuntime and testRuntimeSpecConf so they get run.  Tested against an HDP cluster.

(cherry picked from commit a911703ccacc70b738720c560574fc8a9aee8b2b)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/77e0d6e0
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/77e0d6e0
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/77e0d6e0

Branch: refs/heads/master
Commit: 77e0d6e01f3e43f4a7e51a588b2261780c587ae2
Parents: c65c000
Author: Alan Gates <al...@gmail.com>
Authored: Thu Mar 9 15:39:13 2017 -0800
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:15 2017 -0700

----------------------------------------------------------------------
 .../odpi/specs/runtime/hadoop/ApiExaminer.java  | 19 +-----
 .../src/main/resources/api-examiner-checker.sh  | 52 ---------------
 .../src/main/resources/api-examiner-prep.sh     | 38 ++++++++---
 .../odpi/specs/runtime/TestSpecsRuntime.groovy  | 32 ++++++++++
 .../test/resources/testRuntimeSpecConf.groovy   | 66 ++++++++++++++++++++
 5 files changed, 130 insertions(+), 77 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/77e0d6e0/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
index 2ae97a2..d95c010 100644
--- a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
+++ b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
@@ -1,11 +1,7 @@
 /**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
  * <p>
  * http://www.apache.org/licenses/LICENSE-2.0
  * <p>
@@ -62,15 +58,6 @@ public class ApiExaminer {
     unloadableClasses.add("org.apache.hadoop.io.compress.lz4.Lz4Compressor");
     unloadableClasses.add("org.apache.hadoop.record.compiler.ant.RccTask");
 
-    /*
-    jarsWeCareAbout = new ArrayList<>();
-    jarsWeCareAbout.add("hadoop-common");
-    jarsWeCareAbout.add("hadoop-hdfs");
-    jarsWeCareAbout.add("hadoop-yarn-common");
-    jarsWeCareAbout.add("hadoop-yarn-client");
-    jarsWeCareAbout.add("hadoop-yarn-api");
-    jarsWeCareAbout.add("hadoop-mapreduce-client-core");
-    */
   }
 
   public static void main(String[] args) {

http://git-wip-us.apache.org/repos/asf/bigtop/blob/77e0d6e0/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-checker.sh
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-checker.sh b/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-checker.sh
deleted file mode 100755
index 1cb9583..0000000
--- a/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-checker.sh
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env bash
-
-############################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-# <p>
-# http://www.apache.org/licenses/LICENSE-2.0
-# <p>
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-############################################################################
-
-function usage() {
-    echo "You must set the following variables:  HADOOP_COMMON_HOME HADOOP_COMMON_DIR HADOOP_COMMON_LIB_JARS_DIR "
-    echo "HADOOP_HDFS_HOME HDFS_DIR HDFS_LIB_JARS_DIR HADOOP_YARN_HOME YARN_DIR YARN_LIB_JARS_DIR "
-    echo "HADOOP_MAPRED_HOME MAPRED_DIR MAPRED_LIB_JARS_DIR BIGTOP_HOME (location of bigtop source)"
-    echo "You can get the Hadoop environment variables by using hadoop envvars, hdfs envvars, yarn envvars, and mapred envvars"
-}
-
-for envar in x$HADOOP_COMMON_HOME x$HADOOP_COMMON_DIR x$HADOOP_COMMON_LIB_JARS_DIR x$HADOOP_HDFS_HOME x$HDFS_DIR \
-             x$HDFS_LIB_JARS_DIR x$HADOOP_YARN_HOME x$YARN_DIR x$YARN_LIB_JARS_DIR x$HADOOP_MAPRED_HOME x$MAPRED_DIR \
-             x$MAPRED_LIB_JARS_DIR
-do
-    if [ "${envar}" = "x" ]
-    then
-        usage
-        exit 1
-    fi
-done
-
-
-for dir in $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/build/libs/ $HADOOP_COMMON_HOME/$HADOOP_COMMON_DIR \
-            $HADOOP_COMMON_HOME/$HADOOP_COMMON_LIB_JARS_DIR $HADOOP_HDFS_HOME/$HDFS_DIR \
-            $HADOOP_HDFS_HOME/$HDFS_LIB_JARS_DIR $HADOOP_YARN_HOME/$YARN_DIR $HADOOP_YARN_HOME/$YARN_LIB_JARS_DIR \
-            $HADOOP_MAPRED_HOME/$MAPRED_DIR $HADOOP_MAPRED_HOME/$MAPRED_LIB_JARS_DIR
-do
-    for jar in `find $dir -name \*.jar`
-    do
-        CLASSPATH=$CLASSPATH:$jar
-    done
-done
-
-java -cp $CLASSPATH org.odpi.specs.runtime.hadoop.ApiExaminer $@
-

http://git-wip-us.apache.org/repos/asf/bigtop/blob/77e0d6e0/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh b/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh
index 0bdfe05..8c9ab5e 100755
--- a/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh
+++ b/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh
@@ -1,16 +1,12 @@
 #!/usr/bin/env bash
 
 ############################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-# <p>
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
 # http://www.apache.org/licenses/LICENSE-2.0
-# <p>
+#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -18,6 +14,30 @@
 # limitations under the License.
 ############################################################################
 
+############################################################################
+# This script is used to generate the hadoop-*-api.report.json files in the
+# test/resources directory.  To use it, you will first need to download an
+# Apache binary distribution of Hadoop and set APACHE_HADOOP_DIR to the
+# directory where you untar that distribution.  You will then need to set
+# BIGTOP_HOME to the directory where your bigtop source is located.  Then
+# run this script for each of the jars you want to generate a report for.
+# The arguments passed to this script should be -p <outputdir> -j <jarfile>
+# where outputdir is the directory you'd like to write the report to and
+# jarfile is the full path of the jar to generate the report for.  Reports
+# should be generated for the following jars: hadoop-common, hadoop-hdfs,
+# hadoop-yarn-common, hadoop-yarn-client, hadoop-yarn-api, and
+# hadoop-mapreduce-client-core
+#
+# Example usage:
+# export APACHE_HADOOP_DIR=/tmp/hadoop-2.7.3
+# export BIGTOP_HOME=/home/me/git/bigtop
+# $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh -j $APACHE_HADOOP_DIR/share/hadoop/common/hadoop-common-2.7.3.jar -p $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/src/test/resources
+#
+# The resulting reports should be committed to git.  This script only needs
+# to be run once per ODPi release.
+############################################################################
+
+
 if [ "x${APACHE_HADOOP_DIR}" = "x" ]
 then
     echo "You must set APACHE_HADOOP_DIR to the directory you have placed the Apache Hadoop binary distribution in"

http://git-wip-us.apache.org/repos/asf/bigtop/blob/77e0d6e0/bigtop-tests/spec-tests/runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy b/bigtop-tests/spec-tests/runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
index 2426278..bc2a3b2 100644
--- a/bigtop-tests/spec-tests/runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
+++ b/bigtop-tests/spec-tests/runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
@@ -25,6 +25,9 @@ import org.junit.runner.RunWith
 import org.junit.runners.Parameterized
 import org.junit.runners.Parameterized.Parameters
 
+import java.util.regex.Matcher
+import java.util.regex.Pattern
+
 /**
  * Check all expected environment
  * Tests are constructed dynamically, using external DSL to define
@@ -236,6 +239,35 @@ public class TestSpecsRuntime {
           }
         )
         break
+      case 'api_examination':
+        def basedir = getEnv(arguments['baseDirEnv'], arguments['envcmd'])
+        def libdir = getEnv(arguments['libDir'], arguments['envcmd'])
+
+        def dir = new File(basedir + "/" + libdir)
+        Assert.assertTrue("Expected " + dir.getPath() + " to be a directory", dir.isDirectory())
+        def pattern = Pattern.compile(arguments['jar'] + "-[0-9]+.*\\.jar")
+        def String[] jars = dir.list(new FilenameFilter() {
+          @Override
+          boolean accept(File d, String name) {
+            Matcher matcher = pattern.matcher(name)
+            return (matcher.matches() && !name.contains("test"))
+          }
+        })
+        Assert.assertEquals("Expected only one jar, but got " + jars.join(", "), 1, jars.length)
+        def jar = dir.getAbsolutePath() + "/" + jars[0]
+
+        def examinerJar = System.properties['odpi.test.hive.hcat.job.jar']
+        def resourceFile = System.properties['test.resources.dir']+ "/" + arguments['resourceFile']
+        Shell sh = new Shell()
+        def results = sh.exec("hadoop jar " + examinerJar + " org.odpi.specs.runtime.hadoop.ApiExaminer -c " + resourceFile + " -j " + jar).getErr()
+        int rc = sh.getRet()
+        Assert.assertEquals("Expected command to succeed, but got return code " + rc, 0, rc)
+        if (results.size() > 0) {
+          System.out.println("Received report for jar " + arguments['jar'] + results.join("\n"))
+        }
+        break;
+
+
       default:
         break
     }
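
For reference, a minimal Java sketch of how the jar-name pattern used by the api_examination case above selects a single artifact. The file names below are assumed examples for Hadoop 2.7.3 and are not taken from the commit:

    import java.util.regex.Pattern;

    public class JarPatternDemo {
      public static void main(String[] args) {
        // Same pattern the test builds from arguments['jar'], shown here for hadoop-common.
        Pattern pattern = Pattern.compile("hadoop-common" + "-[0-9]+.*\\.jar");
        String[] candidates = {
            "hadoop-common-2.7.3.jar",        // matches and would be examined
            "hadoop-common-2.7.3-tests.jar",  // matches the regex but is rejected by the "test" filter
            "hadoop-common.jar",              // no version component, no match
            "hadoop-annotations-2.7.3.jar"    // different artifact, no match
        };
        for (String name : candidates) {
          boolean accepted = pattern.matcher(name).matches() && !name.contains("test");
          System.out.println(name + " -> " + accepted);
        }
      }
    }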

http://git-wip-us.apache.org/repos/asf/bigtop/blob/77e0d6e0/bigtop-tests/spec-tests/runtime/src/test/resources/testRuntimeSpecConf.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/testRuntimeSpecConf.groovy b/bigtop-tests/spec-tests/runtime/src/test/resources/testRuntimeSpecConf.groovy
index f5521c5..339de4c 100644
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/testRuntimeSpecConf.groovy
+++ b/bigtop-tests/spec-tests/runtime/src/test/resources/testRuntimeSpecConf.groovy
@@ -360,5 +360,71 @@ specs {
       arguments {
       }
     }
+    'HADOOP_API1' {
+      name = "HADOOP_API1"
+      type = 'api_examination'
+      arguments {
+        baseDirEnv = 'HADOOP_COMMON_HOME'
+        libDir = 'HADOOP_COMMON_DIR'
+        envcmd = 'hadoop envvars'
+        jar = 'hadoop-common'
+        resourceFile = 'hadoop-common-2.7.3-api-report.json'
+      }
+    }
+    'HADOOP_API2' {
+      name = "HADOOP_API2"
+      type = 'api_examination'
+      arguments {
+        baseDirEnv = 'HADOOP_HDFS_HOME'
+        libDir = 'HDFS_DIR'
+        envcmd = 'hdfs envvars'
+        jar = 'hadoop-hdfs'
+        resourceFile = 'hadoop-hdfs-2.7.3-api-report.json'
+      }
+    }
+    'HADOOP_API3' {
+      name = "HADOOP_API3"
+      type = 'api_examination'
+      arguments {
+        baseDirEnv = 'HADOOP_YARN_HOME'
+        libDir = 'YARN_DIR'
+        envcmd = 'yarn envvars'
+        jar = 'hadoop-yarn-common'
+        resourceFile = 'hadoop-yarn-common-2.7.3-api-report.json'
+      }
+    }
+    'HADOOP_API4' {
+      name = "HADOOP_API4"
+      type = 'api_examination'
+      arguments {
+        baseDirEnv = 'HADOOP_YARN_HOME'
+        libDir = 'YARN_DIR'
+        envcmd = 'yarn envvars'
+        jar = 'hadoop-yarn-client'
+        resourceFile = 'hadoop-yarn-client-2.7.3-api-report.json'
+      }
+    }
+    'HADOOP_API5' {
+      name = "HADOOP_API5"
+      type = 'api_examination'
+      arguments {
+        baseDirEnv = 'HADOOP_YARN_HOME'
+        libDir = 'YARN_DIR'
+        envcmd = 'yarn envvars'
+        jar = 'hadoop-yarn-api'
+        resourceFile = 'hadoop-yarn-api-2.7.3-api-report.json'
+      }
+    }
+    'HADOOP_API6' {
+      name = "HADOOP_API6"
+      type = 'api_examination'
+      arguments {
+        baseDirEnv = 'HADOOP_MAPRED_HOME'
+        libDir = 'MAPRED_DIR'
+        envcmd = 'mapred envvars'
+        jar = 'hadoop-mapreduce-client-core'
+        resourceFile = 'hadoop-mapreduce-client-core-2.7.3-api-report.json'
+      }
+    }
   }
 }


[14/50] [abbrv] bigtop git commit: A few other last minute changes to make it sort of work with hard wired values.

Posted by rv...@apache.org.
A few other last minute changes to make it sort of work with hard wired values.

(cherry picked from commit 9bac863d4181a7bb964fa793770b359ac5cd97b0)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/ab4f414a
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/ab4f414a
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/ab4f414a

Branch: refs/heads/master
Commit: ab4f414a945bcbf3924f4c4390b569b3b9175c0d
Parents: f9c6c65
Author: Alan Gates <ga...@hortonworks.com>
Authored: Mon Nov 7 16:39:01 2016 -0800
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:12 2017 -0700

----------------------------------------------------------------------
 .../org/odpi/specs/runtime/hive/HiveHelper.java | 12 ++-
 .../odpi/specs/runtime/hive/TestHCatalog.java   | 86 ++------------------
 2 files changed, 19 insertions(+), 79 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/ab4f414a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
index a4477ff..3e56224 100644
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
+++ b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
@@ -36,8 +36,13 @@ import org.apache.commons.logging.LogFactory;
 public class HiveHelper {
 	
 	private static final Log LOG = LogFactory.getLog(HiveHelper.class.getName());
-		
+
 	public static Map<String, String> execCommand(CommandLine commandline) {
+		return execCommand(commandline, null);
+	}
+
+	public static Map<String, String> execCommand(CommandLine commandline,
+																								Map<String, String> envVars) {
 		
 		System.out.println("Executing command:");
 		System.out.println(commandline.toString());
@@ -50,6 +55,11 @@ public class HiveHelper {
 			LOG.debug("Failed to get process environment: "+ e1.getMessage());
 			e1.printStackTrace();
 		}
+		if (envVars != null) {
+			for (String key : envVars.keySet()) {
+				env.put(key, envVars.get(key));
+			}
+		}
 
 		DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
 		ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
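
A short usage sketch for the new two-argument execCommand overload above; the HADOOP_HOME value is an assumed placeholder rather than a path taken from the commit:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.commons.exec.CommandLine;
    import org.odpi.specs.runtime.hive.HiveHelper;

    public class ExecCommandEnvDemo {
      public static void main(String[] args) {
        // Extra variables are merged into the current process environment
        // before the command is launched.
        Map<String, String> extraEnv = new HashMap<>();
        extraEnv.put("HADOOP_HOME", "/opt/hadoop");   // assumed location, adjust per cluster

        CommandLine cmd = new CommandLine("hadoop").addArgument("version");
        Map<String, String> results = HiveHelper.execCommand(cmd, extraEnv);
        System.out.println("exitValue = " + results.get("exitValue"));
      }
    }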

http://git-wip-us.apache.org/repos/asf/bigtop/blob/ab4f414a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
index 4b61131..bb237d8 100644
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
+++ b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
@@ -32,20 +32,8 @@ import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hive.hcatalog.data.DefaultHCatRecord;
-import org.apache.hive.hcatalog.data.HCatRecord;
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
-import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils;
-import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
 import org.apache.thrift.TException;
 import org.junit.Assert;
 import org.junit.Assume;
@@ -54,14 +42,12 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 
 import java.io.IOException;
-import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
-import java.util.StringTokenizer;
 
 
 public class TestHCatalog {
@@ -136,10 +122,6 @@ public class TestHCatalog {
     // Write some stuff into a file in the location of the table
     table = client.getTable("default", inputTable);
     String inputFile = table.getSd().getLocation() + "/input";
-    /*
-    String inputFile = JdbcConnector.getProperty(JdbcConnector.LOCATION,
-        "Directory to write a file in ") + "/odpi_hcat_input_" + rand.nextInt(Integer.MAX_VALUE);
-        */
     Path inputPath = new Path(inputFile);
     FileSystem fs = FileSystem.get(conf);
     FSDataOutputStream out = fs.create(inputPath);
@@ -149,76 +131,24 @@ public class TestHCatalog {
     out.writeChars("the lamb was sure to go\n");
     out.close();
 
-    Map<String, String> results = HiveHelper.execCommand(new CommandLine("hadoop")
+    Map<String, String> env = new HashMap<>();
+    env.put("HADOOP_HOME","/Users/gates/grid/odpi-testing/hadoop-2.7.3");
+    env.put("HADOOP_CLASSPATH", "/Users/gates/grid/odpi-testing/apache-hive-1.2.1-bin/hcatalog/share/hcatalog/hive-hcatalog-core-1.2.1.jar");
+    env.put("HIVE_HOME", "/Users/gates/grid/odpi-testing/apache-hive-1.2.1-bin");
+    Map<String, String> results = HiveHelper.execCommand(new CommandLine("/Users/gates/grid/odpi-testing/apache-hive-1.2.1-bin/bin/hive")
+        .addArgument("--service")
         .addArgument("jar")
         .addArgument("/Users/gates/git/bigtop/runtime-1.2.0-SNAPSHOT.jar")
         .addArgument(HCatalogMR.class.getName())
         .addArgument(inputTable)
         .addArgument(outputTable)
         .addArgument(inputSchema.getSchemaAsTypeString())
-        .addArgument(outputSchema.getSchemaAsTypeString()));
+        .addArgument(outputSchema.getSchemaAsTypeString()), env);
+    LOG.info(results.toString());
     Assert.assertEquals("HCat job failed", 0, Integer.parseInt(results.get("exitValue")));
 
-
-
-    /*
-    Job job = new Job(conf, "odpi_hcat_test");
-    HCatInputFormat.setInput(job, "default", inputTable);
-
-    job.setInputFormatClass(HCatInputFormat.class);
-    job.setJarByClass(TestHCatalog.class);
-    job.setMapperClass(Map.class);
-    job.setReducerClass(Reduce.class);
-    job.setMapOutputKeyClass(Text.class);
-    job.setMapOutputValueClass(IntWritable.class);
-    job.setOutputKeyClass(WritableComparable.class);
-    job.setOutputValueClass(HCatRecord.class);
-    HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", outputTable, null));
-    HCatOutputFormat.setSchema(job, outputSchema);
-    job.setOutputFormatClass(HCatOutputFormat.class);
-
-    job.addCacheArchive(new URI("hdfs:/user/gates/hive-hcatalog-core-1.2.1.jar"));
-    job.addCacheArchive(new URI("hdfs:/user/gates/hive-metastore-1.2.1.jar"));
-    job.addCacheArchive(new URI("hdfs:/user/gates/hive-exec-1.2.1.jar"));
-
-    Assert.assertTrue(job.waitForCompletion(true));
-    */
-
     client.dropTable("default", inputTable);
     client.dropTable("default", outputTable);
   }
 
-  /*
-  public static class Map extends Mapper<WritableComparable,
-        HCatRecord, Text, IntWritable> {
-    private final static IntWritable one = new IntWritable(1);
-    private Text word = new Text();
-
-    @Override
-    protected void map(WritableComparable key, HCatRecord value, Context context)
-        throws IOException, InterruptedException {
-      String line = value.getString("line", inputSchema);
-      StringTokenizer tokenizer = new StringTokenizer(line);
-      while (tokenizer.hasMoreTokens()) {
-        word.set(tokenizer.nextToken());
-        context.write(word, one);
-      }
-    }
-  }
-
-  public static class Reduce extends Reducer<Text, IntWritable, WritableComparable, HCatRecord> {
-    @Override
-    protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws
-        IOException, InterruptedException {
-      int sum = 0;
-      for (IntWritable i : values) {
-        sum += i.get();
-      }
-      HCatRecord output = new DefaultHCatRecord(2);
-      output.set("word", outputSchema, key);
-      output.set("count", outputSchema, sum);
-      context.write(null, output);
-    }
-  }
-  */
 }
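
The environment map in the diff above is hard-wired to one developer's machine, as the commit message notes. As an illustration only, one way the same three variables could be supplied through -D system properties instead; the property names below are hypothetical and are not defined by this commit:

    import java.util.HashMap;
    import java.util.Map;

    public class HCatTestEnvSketch {
      // Sketch: read tool locations from system properties with cluster-style
      // defaults instead of hard-coding developer paths.
      static Map<String, String> buildEnv() {
        Map<String, String> env = new HashMap<>();
        env.put("HADOOP_HOME", System.getProperty("odpi.test.hadoop.home", "/usr/lib/hadoop"));
        env.put("HADOOP_CLASSPATH", System.getProperty("odpi.test.hcat.core.jar", ""));
        env.put("HIVE_HOME", System.getProperty("odpi.test.hive.home", "/usr/lib/hive"));
        return env;
      }
    }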


[30/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Cleaning up license headers


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/c66106b6
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/c66106b6
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/c66106b6

Branch: refs/heads/master
Commit: c66106b631af67525ee88e4ad60b7cf97384a279
Parents: 5e342c4
Author: Roman Shaposhnik <rv...@apache.org>
Authored: Wed Mar 22 09:17:36 2017 -0700
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:16 2017 -0700

----------------------------------------------------------------------
 bigtop-tests/smoke-tests/odpi-runtime/README.md | 15 +---------
 .../smoke-tests/odpi-runtime/build.gradle       |  8 ++---
 .../odpi/specs/runtime/hadoop/ApiExaminer.java  | 18 +++++++-----
 .../org/odpi/specs/runtime/hive/HCatalogMR.java |  8 ++---
 .../src/main/resources/api-examiner-prep.sh     | 14 ++++++---
 .../odpi/specs/runtime/TestSpecsRuntime.groovy  |  8 ++---
 .../org/odpi/specs/runtime/hive/HiveHelper.java |  8 ++---
 .../odpi/specs/runtime/hive/JdbcConnector.java  |  8 ++---
 .../odpi/specs/runtime/hive/TestBeeline.java    | 31 ++++++++++----------
 .../org/odpi/specs/runtime/hive/TestCLI.java    |  8 ++---
 .../odpi/specs/runtime/hive/TestHCatalog.java   |  8 ++---
 .../org/odpi/specs/runtime/hive/TestJdbc.java   |  8 ++---
 .../org/odpi/specs/runtime/hive/TestSql.java    |  8 ++---
 .../org/odpi/specs/runtime/hive/TestThrift.java |  8 ++---
 .../test/resources/testRuntimeSpecConf.groovy   |  8 ++---
 15 files changed, 81 insertions(+), 85 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/c66106b6/bigtop-tests/smoke-tests/odpi-runtime/README.md
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/README.md b/bigtop-tests/smoke-tests/odpi-runtime/README.md
index 8fde997..19a1b84 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/README.md
+++ b/bigtop-tests/smoke-tests/odpi-runtime/README.md
@@ -31,18 +31,5 @@ Running the tests
 
 Tests could be executed by running the following command 
 ```
-  gradle :bigtop-tests:spec-tests:runtime:test -Pspec.tests --info
+  gradle :bigtop-tests:smoke-tests:odpi-runtime:test -Psmoke.tests --info
 ```
-=======
-consists of two essential parts: a configuration file, communicating the
-functional commands and expected outcome(s) of it; and the test driver to run
-the commands and compare the results.
-
-Running the tests
-=================
-
-Tests could be executed by running the following command
-```
-  gradle :bigtop-tests:spec-tests:runtime:test -Pspec.tests --info
-```
-

http://git-wip-us.apache.org/repos/asf/bigtop/blob/c66106b6/bigtop-tests/smoke-tests/odpi-runtime/build.gradle
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/build.gradle b/bigtop-tests/smoke-tests/odpi-runtime/build.gradle
index 97e3635..1b9558c 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/build.gradle
+++ b/bigtop-tests/smoke-tests/odpi-runtime/build.gradle
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

http://git-wip-us.apache.org/repos/asf/bigtop/blob/c66106b6/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
index d95c010..05d0eae 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
@@ -1,10 +1,14 @@
-/**
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

http://git-wip-us.apache.org/repos/asf/bigtop/blob/c66106b6/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
index 4110d5d..2091e34 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

http://git-wip-us.apache.org/repos/asf/bigtop/blob/c66106b6/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh b/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh
index 8c9ab5e..d062f17 100755
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh
@@ -1,17 +1,23 @@
 #!/usr/bin/env bash
 
 ############################################################################
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
 #
-# http://www.apache.org/licenses/LICENSE-2.0
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+#
 ############################################################################
 
 ############################################################################

http://git-wip-us.apache.org/repos/asf/bigtop/blob/c66106b6/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy b/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
index bc2a3b2..46bdbed 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

http://git-wip-us.apache.org/repos/asf/bigtop/blob/c66106b6/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
index 3e56224..72d34c7 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

http://git-wip-us.apache.org/repos/asf/bigtop/blob/c66106b6/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
index 7512dab..095eb54 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

http://git-wip-us.apache.org/repos/asf/bigtop/blob/c66106b6/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
index 578621a..5e6e89c 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
@@ -1,16 +1,4 @@
-package org.odpi.specs.runtime.hive;
-import org.apache.commons.exec.CommandLine;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import java.io.FileNotFoundException;
-import java.io.PrintWriter;
-import java.util.Map;
-
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,15 +6,26 @@ import java.util.Map;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+package org.odpi.specs.runtime.hive;
+import org.apache.commons.exec.CommandLine;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import java.io.FileNotFoundException;
+import java.io.PrintWriter;
+import java.util.Map;
 
 public class TestBeeline {
 	

http://git-wip-us.apache.org/repos/asf/bigtop/blob/c66106b6/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
index 2b70909..61bb9ac 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

http://git-wip-us.apache.org/repos/asf/bigtop/blob/c66106b6/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
index 0ea49ce..e8c4763 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

http://git-wip-us.apache.org/repos/asf/bigtop/blob/c66106b6/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
index 154fd9c..6a99a17 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

http://git-wip-us.apache.org/repos/asf/bigtop/blob/c66106b6/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
index f247841..99451d1 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

http://git-wip-us.apache.org/repos/asf/bigtop/blob/c66106b6/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
index 8e0abda..58c6595 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

http://git-wip-us.apache.org/repos/asf/bigtop/blob/c66106b6/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/testRuntimeSpecConf.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/testRuntimeSpecConf.groovy b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/testRuntimeSpecConf.groovy
index 339de4c..8852dd8 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/testRuntimeSpecConf.groovy
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/testRuntimeSpecConf.groovy
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.


[31/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-bin.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-bin.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-bin.list
deleted file mode 100644
index d3861b9..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-bin.list
+++ /dev/null
@@ -1,3 +0,0 @@
-mapred
-yarn
-container-executor

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json
deleted file mode 100644
index f62ee8e..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json
+++ /dev/null
@@ -1 +0,0 @@
-{"name":"hadoop-yarn-client","version":"2.7.3","classes":{"org.apache.hadoop.yarn.client.api.YarnClient":{"name":"org.apache.hadoop.yarn.client.api.YarnClient","methods":{"java.util.List getQueueAclsInfo() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueAclsInfo","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.YarnClient createYarnClient()":{"name":"createYarnClient","returnType":"org.apache.hadoop.yarn.client.api.YarnClient","args":[],"exceptions":[]},"java.util.List getApplications(java.util.Set) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":["java.util.Set"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Map getNodeToLabels() throws org.apache.hadoop.yarn.exceptions.YarnExceptio
 n, java.io.IOException":{"name":"getNodeToLabels","returnType":"java.util.Map","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse updateReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"updateReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getAllQueues() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getAllQueues","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplicationAttempts(org.apach
 e.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttempts","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.YarnClientApplication createApplication() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"createApplication","returnType":"org.apache.hadoop.yarn.client.api.YarnClientApplication","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Set getClusterNodeLabels() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterNodeLabels","returnType":"java.util.Set","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void moveApplicationAcrossQueues(org.apache
 .hadoop.yarn.api.records.ApplicationId, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"moveApplicationAcrossQueues","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport getApplicationAttemptReport(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttemptReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.Token getRMDelegationToken(org.apache.hadoop.io.Text) throws org.apache.hadoop.yarn.exceptions.YarnException, 
 java.io.IOException":{"name":"getRMDelegationToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":["org.apache.hadoop.io.Text"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse submitReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"submitReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getContainers(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainers","returnType":"java.util.List","args":["org.apac
 he.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationReport getApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationId submitApplication(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"submitApplication","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":["org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext"],"exceptions":["org.apache.hadoop.
 yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Map getLabelsToNodes() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getLabelsToNodes","returnType":"java.util.Map","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.security.token.Token getAMRMToken(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getAMRMToken","returnType":"org.apache.hadoop.security.token.Token","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications(java.util.EnumSet) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":["java.util.EnumSet"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnExcept
 ion","java.io.IOException"]},"java.util.List getRootQueueInfos() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getRootQueueInfos","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.QueueInfo getQueueInfo(java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueInfo","returnType":"org.apache.hadoop.yarn.api.records.QueueInfo","args":["java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getChildQueueInfos(java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getChildQueueInfos","returnType":"java.util.List","args":["java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Map getLabelsToNodes(java.util
 .Set) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getLabelsToNodes","returnType":"java.util.Map","args":["java.util.Set"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse deleteReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"deleteReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.YarnClusterMetrics getYarnClusterMetrics() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getYarnClusterMetrics","returnType":"org.apache.hadoop.yarn.api.rec
 ords.YarnClusterMetrics","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getNodeReports([Lorg.apache.hadoop.yarn.api.records.NodeState;) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getNodeReports","returnType":"java.util.List","args":["[Lorg.apache.hadoop.yarn.api.records.NodeState;"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void killApplication(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"killApplication","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args
 ":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications(java.util.Set, java.util.EnumSet) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":["java.util.Set","java.util.EnumSet"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ContainerReport getContainerReport(org.apache.hadoop.yarn.api.records.ContainerId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainerReport","returnType":"org.apache.hadoop.yarn.api.records.ContainerReport","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.client.api.async.NMClientAsync":{"name":"org.apache.hadoop.yarn.client.api.async.NMClientAsync","methods":{"v
 oid setClient(org.apache.hadoop.yarn.client.api.NMClient)":{"name":"setClient","returnType":"void","args":["org.apache.hadoop.yarn.client.api.NMClient"],"exceptions":[]},"void setCallbackHandler(org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler)":{"name":"setCallbackHandler","returnType":"void","args":["org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler"],"exceptions":[]},"void getContainerStatusAsync(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId)":{"name":"getContainerStatusAsync","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"void startContainerAsync(org.apache.hadoop.yarn.api.records.Container, org.apache.hadoop.yarn.api.records.ContainerLaunchContext)":{"name":"startContainerAsync","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Container","org.apache.hadoop.yarn.api.records.ContainerLaunch
 Context"],"exceptions":[]},"void stopContainerAsync(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId)":{"name":"stopContainerAsync","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"org.apache.hadoop.yarn.client.api.NMClient getClient()":{"name":"getClient","returnType":"org.apache.hadoop.yarn.client.api.NMClient","args":[],"exceptions":[]},"org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler getCallbackHandler()":{"name":"getCallbackHandler","returnType":"org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler","args":[],"exceptions":[]},"org.apache.hadoop.yarn.client.api.async.NMClientAsync createNMClientAsync(org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler)":{"name":"createNMClientAsync","returnType":"org.apache.hadoop.yarn.client.api.async.NMClientAsync","args":["org.apache.hadoop.yarn.client.api.
 async.NMClientAsync$CallbackHandler"],"exceptions":[]}}},"org.apache.hadoop.yarn.client.api.AMRMClient":{"name":"org.apache.hadoop.yarn.client.api.AMRMClient","methods":{"org.apache.hadoop.yarn.client.api.NMTokenCache getNMTokenCache()":{"name":"getNMTokenCache","returnType":"org.apache.hadoop.yarn.client.api.NMTokenCache","args":[],"exceptions":[]},"void addContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"addContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"void updateBlacklist(java.util.List, java.util.List)":{"name":"updateBlacklist","returnType":"void","args":["java.util.List","java.util.List"],"exceptions":[]},"java.util.List getMatchingRequests(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource)":{"name":"getMatchingRequests","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.Prior
 ity","java.lang.String","org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void waitFor(com.google.common.base.Supplier) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.api.records.Resource getAvailableResources()":{"name":"getAvailableResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void waitFor(com.google.common.base.Supplier, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse registerApplicationMaster(java.lang.String, int, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"registerApplicationMaster","returnType":"org.ap
 ache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","args":["java.lang.String","int","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void unregisterApplicationMaster(org.apache.hadoop.yarn.api.records.FinalApplicationStatus, java.lang.String, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"unregisterApplicationMaster","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus","java.lang.String","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void waitFor(com.google.common.base.Supplier, int, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int","int"],"exceptions":["java.lang.InterruptedException"]},"int getClusterNodeCount()":{"name":"getClusterNodeCount","returnType":"int","args":[],
 "exceptions":[]},"void releaseAssignedContainer(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"releaseAssignedContainer","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"void setNMTokenCache(org.apache.hadoop.yarn.client.api.NMTokenCache)":{"name":"setNMTokenCache","returnType":"void","args":["org.apache.hadoop.yarn.client.api.NMTokenCache"],"exceptions":[]},"void removeContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"removeContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"org.apache.hadoop.yarn.client.api.AMRMClient createAMRMClient()":{"name":"createAMRMClient","returnType":"org.apache.hadoop.yarn.client.api.AMRMClient","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse allocate(float) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":
 "allocate","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["float"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.client.api.YarnClientApplication":{"name":"org.apache.hadoop.yarn.client.api.YarnClientApplication","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse getNewApplicationResponse()":{"name":"getNewApplicationResponse","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext getApplicationSubmissionContext()":{"name":"getApplicationSubmissionContext","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.client.api.AHSClient":{"name":"org.apache.hadoop.yarn.client.api.AHSClient","methods":{"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport g
 etApplicationAttemptReport(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttemptReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getContainers(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainers","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org
 .apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationReport getApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.AHSClient createAHSClient()":{"name":"createAHSClient","returnType":"org.apache.hadoop.yarn.client.api.AHSClient","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerReport getContainerReport(org.apache.hadoop.yarn.api.records.ContainerId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainerReport","returnType":"org.apache.hadoop.yarn.api.records.ContainerReport","args":["or
 g.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplicationAttempts(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttempts","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync":{"name":"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync","methods":{"void addContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"addContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"void updateBlacklist(java.util.List, java.util.List)":{"name":"updateBlacklist","returnType":"void","args":["java.uti
 l.List","java.util.List"],"exceptions":[]},"java.util.List getMatchingRequests(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource)":{"name":"getMatchingRequests","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void waitFor(com.google.common.base.Supplier) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync createAMRMClientAsync(int, org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler)":{"name":"createAMRMClientAsync","returnType":"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync","args":["int","org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler"],"exceptions":[]},"org.apache.hadoop.yarn.api.r
 ecords.Resource getAvailableResources()":{"name":"getAvailableResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void waitFor(com.google.common.base.Supplier, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync createAMRMClientAsync(org.apache.hadoop.yarn.client.api.AMRMClient, int, org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler)":{"name":"createAMRMClientAsync","returnType":"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync","args":["org.apache.hadoop.yarn.client.api.AMRMClient","int","org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse registerApplicationMaster(java.lang.String, int, java.lang.String) throws
  org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"registerApplicationMaster","returnType":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","args":["java.lang.String","int","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void unregisterApplicationMaster(org.apache.hadoop.yarn.api.records.FinalApplicationStatus, java.lang.String, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"unregisterApplicationMaster","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus","java.lang.String","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void waitFor(com.google.common.base.Supplier, int, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int","int"],"exceptio
 ns":["java.lang.InterruptedException"]},"int getClusterNodeCount()":{"name":"getClusterNodeCount","returnType":"int","args":[],"exceptions":[]},"void releaseAssignedContainer(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"releaseAssignedContainer","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"void removeContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"removeContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"void setHeartbeatInterval(int)":{"name":"setHeartbeatInterval","returnType":"void","args":["int"],"exceptions":[]}}},"org.apache.hadoop.yarn.client.api.NMClient":{"name":"org.apache.hadoop.yarn.client.api.NMClient","methods":{"void stopContainer(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOExc
 eption":{"name":"stopContainer","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ContainerStatus getContainerStatus(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainerStatus","returnType":"org.apache.hadoop.yarn.api.records.ContainerStatus","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.NMTokenCache getNMTokenCache()":{"name":"getNMTokenCache","returnType":"org.apache.hadoop.yarn.client.api.NMTokenCache","args":[],"exceptions":[]},"org.apache.hadoop.yarn.client.api.NMClient creat
 eNMClient()":{"name":"createNMClient","returnType":"org.apache.hadoop.yarn.client.api.NMClient","args":[],"exceptions":[]},"java.util.Map startContainer(org.apache.hadoop.yarn.api.records.Container, org.apache.hadoop.yarn.api.records.ContainerLaunchContext) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"startContainer","returnType":"java.util.Map","args":["org.apache.hadoop.yarn.api.records.Container","org.apache.hadoop.yarn.api.records.ContainerLaunchContext"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.NMClient createNMClient(java.lang.String)":{"name":"createNMClient","returnType":"org.apache.hadoop.yarn.client.api.NMClient","args":["java.lang.String"],"exceptions":[]},"void setNMTokenCache(org.apache.hadoop.yarn.client.api.NMTokenCache)":{"name":"setNMTokenCache","returnType":"void","args":["org.apache.hadoop.yarn.client.api.NMTokenCache"],"exceptions":[]},"void 
 cleanupRunningContainersOnStop(boolean)":{"name":"cleanupRunningContainersOnStop","returnType":"void","args":["boolean"],"exceptions":[]}}}}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json
deleted file mode 100644
index b394bff..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json
+++ /dev/null
@@ -1 +0,0 @@
-{"name":"hadoop-yarn-common","version":"2.7.3","classes":{"org.apache.hadoop.yarn.security.ContainerTokenSelector":{"name":"org.apache.hadoop.yarn.security.ContainerTokenSelector","methods":{"org.apache.hadoop.security.token.Token selectToken(org.apache.hadoop.io.Text, java.util.Collection)":{"name":"selectToken","returnType":"org.apache.hadoop.security.token.Token","args":["org.apache.hadoop.io.Text","java.util.Collection"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo":{"name":"org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getTo
 kenInfo","returnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.SchedulerSecurityInfo":{"name":"org.apache.hadoop.yarn.security.SchedulerSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getTokenInfo","returnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.util.SystemClock":{"name":"org.apache.hadoop.yarn.util.SystemClock","methods":{"long getTime()":{"name":"getTime","returnType":"long",
 "args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.security.client.RMDelegationTokenSelector":{"name":"org.apache.hadoop.yarn.security.client.RMDelegationTokenSelector","methods":{"org.apache.hadoop.security.token.Token selectToken(org.apache.hadoop.io.Text, java.util.Collection)":{"name":"selectToken","returnType":"org.apache.hadoop.security.token.Token","args":["org.apache.hadoop.io.Text","java.util.Collection"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.client.ClientRMSecurityInfo":{"name":"org.apache.hadoop.yarn.security.client.ClientRMSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getTokenInfo","r
 eturnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.admin.AdminSecurityInfo":{"name":"org.apache.hadoop.yarn.security.admin.AdminSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getTokenInfo","returnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.client.ClientRMProxy":{"name":"org.apache.hadoop.yarn.client.ClientRMProxy","methods":{"org.apache.hadoop.io.Text getRMDelegationTokenService
 (org.apache.hadoop.conf.Configuration)":{"name":"getRMDelegationTokenService","returnType":"org.apache.hadoop.io.Text","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Text getAMRMTokenService(org.apache.hadoop.conf.Configuration)":{"name":"getAMRMTokenService","returnType":"org.apache.hadoop.io.Text","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Text getTokenService(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.String, int)":{"name":"getTokenService","returnType":"org.apache.hadoop.io.Text","args":["org.apache.hadoop.conf.Configuration","java.lang.String","java.lang.String","int"],"exceptions":[]},"java.lang.Object createRMProxy(org.apache.hadoop.conf.Configuration, java.lang.Class) throws java.io.IOException":{"name":"createRMProxy","returnType":"java.lang.Object","args":["org.apache.hadoop.conf.Configuration","java.lang.Class"],"exceptions":["java.io.IOException"]}}},"org.apache.had
 oop.yarn.util.Clock":{"name":"org.apache.hadoop.yarn.util.Clock","methods":{"long getTime()":{"name":"getTime","returnType":"long","args":[],"exceptions":[]}}}}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-jar.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-jar.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-jar.list
deleted file mode 100644
index 26613d4..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-jar.list
+++ /dev/null
@@ -1,38 +0,0 @@
-netty-3\.6\.2\.Final[\.\-_].*jar
-leveldbjni-all-1\.8[\.\-_].*jar
-jackson-core-asl-1\.9\.13[\.\-_].*jar
-jackson-xc-1\.9\.13[\.\-_].*jar
-jersey-server-1\.9[\.\-_].*jar
-stax-api-1\.0-2[\.\-_].*jar
-zookeeper-3\.4\.6[\.\-_].*jar
-guice-3\.0[\.\-_].*jar
-jaxb-impl-2\.2\.3-1[\.\-_].*jar
-zookeeper-3\.4\.6.*-tests\.jar
-jersey-client-1\.9[\.\-_].*jar
-commons-cli-1\.2[\.\-_].*jar
-log4j-1\.2\.17[\.\-_].*jar
-jackson-mapper-asl-1\.9\.13[\.\-_].*jar
-guava-11\.0\.2[\.\-_].*jar
-jetty-6\.1\.26[\.\-_].*jar
-commons-logging-1\.1\.3[\.\-_].*jar
-jersey-core-1\.9[\.\-_].*jar
-jersey-guice-1\.9[\.\-_].*jar
-commons-compress-1\.4\.1[\.\-_].*jar
-jettison-1\.1[\.\-_].*jar
-commons-collections-3\.2\.[12][\.\-_].*jar
-xz-1\.0[\.\-_].*jar
-asm-3\.2[\.\-_].*jar
-commons-codec-1\.4[\.\-_].*jar
-aopalliance-1\.0[\.\-_].*jar
-javax\.inject-1[\.\-_].*jar
-commons-lang-2\.6[\.\-_].*jar
-jetty-util-6\.1\.26[\.\-_].*jar
-jsr305-3\.0\.0[\.\-_].*jar
-protobuf-java-2\.5\.0[\.\-_].*jar
-commons-io-2\.4[\.\-_].*jar
-activation-1\.1[\.\-_].*jar
-jersey-json-1\.9[\.\-_].*jar
-jaxb-api-2\.2\.2[\.\-_].*jar
-guice-servlet-3\.0[\.\-_].*jar
-servlet-api-2\.5[\.\-_].*jar
-jackson-jaxrs-1\.9\.13[\.\-_].*jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn.list
deleted file mode 100644
index bb88005..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn.list
+++ /dev/null
@@ -1,74 +0,0 @@
-hadoop-yarn-server-sharedcachemanager.*\.jar
-bin
-bin/mapred
-bin/container-executor
-bin/yarn
-sbin
-sbin/yarn-daemon\.sh
-sbin/yarn-daemons\.sh
-hadoop-yarn-registry-2\.7\.[0-9].*\.jar
-hadoop-yarn-applications-unmanaged-am-launcher-2\.7\.[0-9].*\.jar
-hadoop-yarn-common-2\.7\.[0-9].*\.jar
-hadoop-yarn-server-nodemanager.*\.jar
-hadoop-yarn-server-applicationhistoryservice-2\.7\.[0-9].*\.jar
-hadoop-yarn-server-common.*\.jar
-etc
-etc/hadoop
-hadoop-yarn-server-common-2\.7\.[0-9].*\.jar
-hadoop-yarn-server-tests.*\.jar
-hadoop-yarn-server-resourcemanager.*\.jar
-hadoop-yarn-server-web-proxy.*\.jar
-hadoop-yarn-api-2\.7\.[0-9].*\.jar
-hadoop-yarn-common.*\.jar
-hadoop-yarn-server-web-proxy-2\.7\.[0-9].*\.jar
-hadoop-yarn-applications-distributedshell-2\.7\.[0-9].*\.jar
-hadoop-yarn-server-tests-2\.7\.[0-9].*\.jar
-hadoop-yarn-server-resourcemanager-2\.7\.[0-9].*\.jar
-hadoop-yarn-registry.*\.jar
-hadoop-yarn-server-sharedcachemanager-2\.7\.[0-9].*\.jar
-hadoop-yarn-client-2\.7\.[0-9].*\.jar
-hadoop-yarn-applications-distributedshell.*\.jar
-hadoop-yarn-server-nodemanager-2\.7\.[0-9].*\.jar
-hadoop-yarn-api.*\.jar
-hadoop-yarn-client.*\.jar
-lib
-lib/commons-cli-1\.2.*\.jar
-lib/leveldbjni-all-1\.8.*\.jar
-lib/jaxb-api-2\.2\.2.*\.jar
-lib/jettison-1\.1.*\.jar
-lib/commons-io-2\.4.*\.jar
-lib/jetty-util-6\.1\.26.*\.jar
-lib/jaxb-impl-2\.2\.3-1.*\.jar
-lib/jersey-guice-1\.9.*\.jar
-lib/netty-3\.6\.2\.Final.*\.jar
-lib/jersey-core-1\.9.*\.jar
-lib/jackson-mapper-asl-1\.9\.13.*\.jar
-lib/asm-3\.2.*\.jar
-lib/commons-compress-1\.4\.1.*\.jar
-lib/aopalliance-1\.0.*\.jar
-lib/jackson-xc-1\.9\.13.*\.jar
-lib/jersey-json-1\.9.*\.jar
-lib/commons-codec-1\.4.*\.jar
-lib/jackson-core-asl-1\.9\.13.*\.jar
-lib/servlet-api-2\.5.*\.jar
-lib/jetty-6\.1\.26.*\.jar
-lib/jersey-server-1\.9.*\.jar
-lib/log4j-1\.2\.17.*\.jar
-lib/zookeeper-3\.4\.6.*-tests\.jar
-lib/stax-api-1\.0-2.*\.jar
-lib/jersey-client-1\.9.*\.jar
-lib/xz-1\.0.*\.jar
-lib/zookeeper-3\.4\.6.*\.jar
-lib/activation-1\.1.*\.jar
-lib/javax\.inject-1.*\.jar
-lib/protobuf-java-2\.5\.0.*\.jar
-lib/guice-3\.0.*\.jar
-lib/guava-11\.0\.2.*\.jar
-lib/jsr305-3\.0\.0.*\.jar
-lib/jackson-jaxrs-1\.9\.13.*\.jar
-lib/commons-collections-3\.2\.[1-2].*\.jar
-lib/commons-logging-1\.1\.3.*\.jar
-lib/commons-lang-2\.6.*\.jar
-lib/guice-servlet-3\.0.*\.jar
-hadoop-yarn-server-applicationhistoryservice.*\.jar
-hadoop-yarn-applications-unmanaged-am-launcher.*\.jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/resources/testRuntimeSpecConf.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/testRuntimeSpecConf.groovy b/bigtop-tests/spec-tests/runtime/src/test/resources/testRuntimeSpecConf.groovy
deleted file mode 100644
index 339de4c..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/testRuntimeSpecConf.groovy
+++ /dev/null
@@ -1,430 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-specs {
-  tests {
-    'HADOOP_EJH1' {
-      name = 'HADOOP_EJH1'
-      type = 'envdir'
-      arguments {
-        envcmd = 'hadoop envvars'
-        variable = 'JAVA_HOME'
-      }
-    }
-    'HADOOP_EC1' {
-      name = 'HADOOP_EC1'
-      type = 'envdir'
-      arguments {
-        envcmd = 'hadoop envvars'
-        variable = 'HADOOP_TOOLS_PATH'
-        donotcheckexistance = true
-      }
-    }
-    'HADOOP_EC2' {
-      name = 'HADOOP_EC2'
-      type = 'envdir'
-      arguments {
-        envcmd = 'hadoop envvars'
-        variable = 'HADOOP_COMMON_HOME'
-      }
-    }
-    'HADOOP_EC3' {
-      name = 'HADOOP_EC3'
-      type = 'envdir'
-      arguments {
-        envcmd = 'hadoop envvars'
-        variable = 'HADOOP_COMMON_DIR'
-        relative = true
-      }
-    }
-    'HADOOP_EC4' {
-      name = 'HADOOP_EC4'
-      type = 'envdir'
-      arguments {
-        envcmd = 'hadoop envvars'
-        variable = 'HADOOP_COMMON_LIB_JARS_DIR'
-        relative = true
-      }
-    }
-    'HADOOP_EC5' {
-      name = 'HADOOP_EC5'
-      type = 'envdir'
-      arguments {
-        envcmd = 'hadoop envvars'
-        variable = 'HADOOP_CONF_DIR'
-      }
-    }
-    'HADOOP_EH1' {
-      name = 'HADOOP_EH1'
-      type = 'envdir'
-      arguments {
-        envcmd = 'hdfs envvars'
-        variable = 'HADOOP_HDFS_HOME'
-      }
-    }
-    'HADOOP_EH2' {
-      name = 'HADOOP_EH2'
-      type = 'envdir'
-      arguments {
-        envcmd = 'hdfs envvars'
-        variable = 'HDFS_DIR'
-        relative = true
-      }
-    }
-    'HADOOP_EH3' {
-      name = 'HADOOP_EH3'
-      type = 'envdir'
-      arguments {
-        envcmd = 'hdfs envvars'
-        variable = 'HDFS_LIB_JARS_DIR'
-        relative = true
-      }
-    }
-    'HADOOP_EY1' {
-      name = 'HADOOP_EY1'
-      type = 'envdir'
-      arguments {
-        envcmd = 'yarn envvars'
-        variable = 'HADOOP_YARN_HOME'
-      }
-    }
-    'HADOOP_EY2' {
-      name = 'HADOOP_EY2'
-      type = 'envdir'
-      arguments {
-        envcmd = 'yarn envvars'
-        variable = 'YARN_DIR'
-        relative = true
-      }
-    }
-    'HADOOP_EY3' {
-      name = 'HADOOP_EY3'
-      type = 'envdir'
-      arguments {
-        envcmd = 'yarn envvars'
-        variable = 'YARN_LIB_JARS_DIR'
-        relative = true
-      }
-    }
-    'HADOOP_EM1' {
-      name = 'HADOOP_EM1'
-      type = 'envdir'
-      arguments {
-        envcmd = 'mapred envvars'
-        variable = 'HADOOP_MAPRED_HOME'
-      }
-    }
-    'HADOOP_EM2' {
-      name = 'HADOOP_EM2'
-      type = 'envdir'
-      arguments {
-        envcmd = 'mapred envvars'
-        variable = 'MAPRED_DIR'
-        relative = true
-      }
-    }
-    'HADOOP_EM3' {
-      name = 'HADOOP_EM3'
-      type = 'envdir'
-      arguments {
-        envcmd = 'mapred envvars'
-        variable = 'MAPRED_LIB_JARS_DIR'
-        relative = true
-      }
-    }
-    'HADOOP_EJH2_HADOOP' {
-      name = 'HADOOP_EJH2_HADOOP'
-      type = 'shell'
-      arguments {
-        command = '[ "${JAVA_HOME}xxx" != "xxx" ] || grep -E "^\\s*export\\s+JAVA_HOME=[\\w/]+" `hadoop envvars | grep HADOOP_CONF_DIR | sed "s|[^=]\\+=\'\\([^\']\\+\\)\'$|\\1|g"`/hadoop-env.sh'
-        message = 'JAVA_HOME is not set'
-      }
-    }
-    'HADOOP_EJH2_YARN' {
-      name = 'HADOOP_EJH2_YARN'
-      type = 'shell'
-      arguments {
-        command = '[ "${JAVA_HOME}xxx" != "xxx" ] || grep -E "^\\s*export\\s+JAVA_HOME=[\\w/]+" `hadoop envvars | grep HADOOP_CONF_DIR | sed "s|[^=]\\+=\'\\([^\']\\+\\)\'$|\\1|g"`/yarn-env.sh'
-        message = 'JAVA_HOME is not set'
-      }
-    }
-    'HADOOP_PLATVER_1' {
-      name = 'HADOOP_PLATVER'
-      type = 'shell'
-      arguments {
-        command = 'hadoop version | head -n 1 | grep -E \'Hadoop\\s+[0-9\\.]+[_\\-][A-Za-z_0-9]+\''
-        message = 'Hadoop\'s version string is not correct'
-      }
-    }
-    'HADOOP_DIRSTRUCT_COMMON' {
-      name = 'HADOOP_DIRSTRUCT_COMMON'
-      type = 'dirstruct'
-      arguments {
-        envcmd = 'hadoop envvars'
-        baseDirEnv = 'HADOOP_COMMON_HOME'
-        referenceList = 'hadoop-common.list'
-      }
-    }
-    'HADOOP_DIRSTRUCT_HDFS' {
-      name = 'HADOOP_DIRSTRUCT_HDFS'
-      type = 'dirstruct'
-      arguments {
-        envcmd = 'hdfs envvars'
-        baseDirEnv = 'HADOOP_HDFS_HOME'
-        referenceList = 'hadoop-hdfs.list'
-      }
-    }
-    'HADOOP_DIRSTRUCT_MAPRED' {
-      name = 'HADOOP_DIRSTRUCT_MAPRED'
-      type = 'dirstruct'
-      arguments {
-        envcmd = 'mapred envvars'
-        baseDirEnv = 'HADOOP_MAPRED_HOME'
-        referenceList = 'hadoop-mapreduce.list'
-      }
-    }
-    'HADOOP_DIRSTRUCT_YARN' {
-      name = 'HADOOP_DIRSTRUCT_YARN'
-      type = 'dirstruct'
-      arguments {
-        envcmd = 'yarn envvars'
-        baseDirEnv = 'HADOOP_YARN_HOME'
-        referenceList = 'hadoop-yarn.list'
-      }
-    }
-    'HADOOP_SUBPROJS' {
-      name = 'HADOOP_SUBPROJS'
-      type = 'dirstruct'
-      arguments {
-        envcmd = 'hadoop envvars'
-        baseDirEnv = 'HADOOP_COMMON_HOME'
-        referenceList = 'hadoop-subprojs.list'
-      }
-    }
-    'HADOOP_BINCONTENT_COMMON' {
-      name = 'HADOOP_BINCONTENT_COMMON'
-      type = 'dirstruct'
-      arguments {
-        envcmd = 'hadoop envvars'
-        baseDirEnv = 'HADOOP_COMMON_HOME'
-        subDir = 'bin'
-        referenceList = 'hadoop-common-bin.list'
-      }
-    }
-    'HADOOP_BINCONTENT_HDFS' {
-      name = 'HADOOP_BINCONTENT_HDFS'
-      type = 'dirstruct'
-      arguments {
-        envcmd = 'hdfs envvars'
-        baseDirEnv = 'HADOOP_HDFS_HOME'
-        subDir = 'bin'
-        referenceList = 'hadoop-hdfs-bin.list'
-      }
-    }
-    'HADOOP_BINCONTENT_MAPRED' {
-      name = 'HADOOP_BINCONTENT_MAPRED'
-      type = 'dirstruct'
-      arguments {
-        envcmd = 'mapred envvars'
-        baseDirEnv = 'HADOOP_MAPRED_HOME'
-        subDir = 'bin'
-        referenceList = 'hadoop-mapreduce-bin.list'
-      }
-    }
-    'HADOOP_BINCONTENT_YARN' {
-      name = 'HADOOP_BINCONTENT_YARN'
-      type = 'dirstruct'
-      arguments {
-        envcmd = 'yarn envvars'
-        baseDirEnv = 'HADOOP_YARN_HOME'
-        subDir = 'bin'
-        referenceList = 'hadoop-yarn-bin.list'
-      }
-    }
-    'HADOOP_LIBJARSCONTENT_COMMON' {
-      name = 'HADOOP_JARCONTENT_COMMON'
-      type = 'dirstruct'
-      arguments {
-        envcmd = 'hadoop envvars'
-        baseDirEnv = 'HADOOP_COMMON_HOME'
-        subDirEnv = 'HADOOP_COMMON_LIB_JARS_DIR'
-        referenceList = 'hadoop-common-jar.list'
-      }
-    }
-    'HADOOP_LIBJARSCONTENT_HDFS' {
-      name = 'HADOOP_JARCONTENT_HDFS'
-      type = 'dirstruct'
-      arguments {
-        envcmd = 'hdfs envvars'
-        baseDirEnv = 'HADOOP_HDFS_HOME'
-        subDirEnv = 'HDFS_LIB_JARS_DIR'
-        referenceList = 'hadoop-hdfs-jar.list'
-      }
-    }
-    'HADOOP_LIBJARSCONTENT_MAPRED' {
-      name = 'HADOOP_JARCONTENT_MAPRED'
-      type = 'dirstruct'
-      arguments {
-        envcmd = 'mapred envvars'
-        baseDirEnv = 'HADOOP_MAPRED_HOME'
-        subDirEnv = 'MAPRED_LIB_JARS_DIR'
-        referenceList = 'hadoop-mapreduce-jar.list'
-      }
-    }
-    'HADOOP_LIBJARSCONTENT_YARN' {
-      name = 'HADOOP_JARCONTENT_YARN'
-      type = 'dirstruct'
-      arguments {
-        envcmd = 'yarn envvars'
-        baseDirEnv = 'HADOOP_YARN_HOME'
-        subDirEnv = 'YARN_LIB_JARS_DIR'
-        referenceList = 'hadoop-yarn-jar.list'
-      }
-    }
-    'HADOOP_GETCONF' {
-      name = 'HADOOP_GETCONF'
-      type = 'shell'
-      arguments {
-        command = '[ `hdfs getconf -confKey dfs.permissions.superusergroup >/dev/null 2>/dev/null; echo $?` == "0" ]'
-        message = 'It\'s not possible to determine key Hadoop configuration values by using ${HADOOP_HDFS_HOME}/bin/hdfs getconf'
-      }
-    }
-    'HADOOP_CNATIVE1' {
-      name = 'HADOOP_CNATIVE1'
-      type = 'shell'
-      arguments {
-        command = 'hadoop checknative -a 2>/dev/null | grep hadoop | grep true'
-        message = 'hadoop-common-project must be built with -Pnative or -Pnative-win'
-      }
-    }
-    'HADOOP_CNATIVE2' {
-      name = 'HADOOP_CNATIVE2'
-      type = 'shell'
-      arguments {
-        command = 'hadoop checknative -a 2>/dev/null | grep snappy | grep true'
-        message = 'hadoop-common-project must be built with -Prequire.snappy'
-      }
-    }
-    'HADOOP_HNATIVE1' {
-      name = 'HADOOP_HNATIVE1'
-      type = 'shell'
-      arguments {
-        command = '[ ! -n ${HADOOP_COMMON_HOME} ] || HADOOP_COMMON_HOME=`hadoop envvars | grep HADOOP_COMMON_HOME | sed "s/.*=\'\\(.*\\)\'/\\1/"`; '+
-            'test -e $HADOOP_COMMON_HOME/lib/native/libhdfs.a'
-        message = 'hadoop-hdfs-project must be built with -Pnative or -Pnative-win'
-      }
-    }
-    'HADOOP_YNATIVE1' {
-      name = 'HADOOP_YNATIVE1'
-      type = 'shell'
-      arguments {
-        command = '[ ! -n ${HADOOP_YARN_HOME} ] || HADOOP_YARN_HOME=`yarn envvars | grep HADOOP_YARN_HOME | sed "s/.*=\'\\(.*\\)\'/\\1/"`; '+
-            'echo $HADOOP_YARN_HOME; test -e $HADOOP_YARN_HOME/bin/container-executor'
-        message = 'hadoop-yarn-project must be built with -Pnative or -Pnative-win'
-      }
-    }
-    'HADOOP_MNATIVE1' {
-      name = 'HADOOP_MNATIVE1'
-      type = 'shell'
-      arguments {
-        command = 'hadoop checknative -a 2>/dev/null | grep snappy | grep true'
-        message = 'hadoop-mapreduce-project must be built with -Prequire.snappy'
-      }
-    }
-    'HADOOP_COMPRESSION' {
-      name = 'HADOOP_COMPRESSION'
-      type = 'shell'
-      arguments {
-        command = '[[ "$(hadoop checknative -a 2>/dev/null | egrep -e ^zlib -e ^snappy | sort -u | grep true | wc -l)" == 2 ]]'
-        message = 'hadoop must be built with -Dcompile.native=true'
-      }
-    }
-    'HADOOP_TOOLS' {
-      name = 'HADOOP_TOOLS'
-      type = 'hadoop_tools'
-      arguments {
-      }
-    }
-    'HADOOP_API1' {
-      name = "HADOOP_API1"
-      type = 'api_examination'
-      arguments {
-        baseDirEnv = 'HADOOP_COMMON_HOME'
-        libDir = 'HADOOP_COMMON_DIR'
-        envcmd = 'hadoop envvars'
-        jar = 'hadoop-common'
-        resourceFile = 'hadoop-common-2.7.3-api-report.json'
-      }
-    }
-    'HADOOP_API2' {
-      name = "HADOOP_API2"
-      type = 'api_examination'
-      arguments {
-        baseDirEnv = 'HADOOP_HDFS_HOME'
-        libDir = 'HDFS_DIR'
-        envcmd = 'hdfs envvars'
-        jar = 'hadoop-hdfs'
-        resourceFile = 'hadoop-hdfs-2.7.3-api-report.json'
-      }
-    }
-    'HADOOP_API3' {
-      name = "HADOOP_API3"
-      type = 'api_examination'
-      arguments {
-        baseDirEnv = 'HADOOP_YARN_HOME'
-        libDir = 'YARN_DIR'
-        envcmd = 'yarn envvars'
-        jar = 'hadoop-yarn-common'
-        resourceFile = 'hadoop-yarn-common-2.7.3-api-report.json'
-      }
-    }
-    'HADOOP_API4' {
-      name = "HADOOP_API4"
-      type = 'api_examination'
-      arguments {
-        baseDirEnv = 'HADOOP_YARN_HOME'
-        libDir = 'YARN_DIR'
-        envcmd = 'yarn envvars'
-        jar = 'hadoop-yarn-client'
-        resourceFile = 'hadoop-yarn-client-2.7.3-api-report.json'
-      }
-    }
-    'HADOOP_API5' {
-      name = "HADOOP_API5"
-      type = 'api_examination'
-      arguments {
-        baseDirEnv = 'HADOOP_YARN_HOME'
-        libDir = 'YARN_DIR'
-        envcmd = 'yarn envvars'
-        jar = 'hadoop-yarn-api'
-        resourceFile = 'hadoop-yarn-api-2.7.3-api-report.json'
-      }
-    }
-    'HADOOP_API6' {
-      name = "HADOOP_API6"
-      type = 'api_examination'
-      arguments {
-        baseDirEnv = 'HADOOP_MAPRED_HOME'
-        libDir = 'MAPRED_DIR'
-        envcmd = 'mapred envvars'
-        jar = 'hadoop-mapreduce-client-core'
-        resourceFile = 'hadoop-mapreduce-client-core-2.7.3-api-report.json'
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/build.gradle
----------------------------------------------------------------------
diff --git a/build.gradle b/build.gradle
index 101ecc7..b0a83b1 100644
--- a/build.gradle
+++ b/build.gradle
@@ -80,6 +80,8 @@ rat {
        "bigtop-tests/smoke-tests/phoenix/*.sql",
        "bigtop-tests/smoke-tests/ignite-hadoop/*.data",
        "bigtop-tests/smoke-tests/tajo/table1/*.csv",
+       "bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/*.json",
+       "bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/*.list",
        "**/target/**",
        "**/build/**",
        "**/.gradle/**",
@@ -427,7 +429,6 @@ task "bigtop-slaves"(type:Exec,
 
 project.afterEvaluate {
   checkClusterTestProjects("smoke.tests")
-  checkClusterTestProjects("spec.tests")
   artifactToInstall(dependsOn: [installTopLevel, installCommon, installConf, installiTest])
 }
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/settings.gradle
----------------------------------------------------------------------
diff --git a/settings.gradle b/settings.gradle
index 7ae4bf5..ac071f1 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -31,9 +31,4 @@ fTree.each() { buildFile ->
   def parent = buildFile.getParentFile().name
   include("bigtop-tests:smoke-tests:$parent")
 }
-fTree = fileTree(dir: 'bigtop-tests/spec-tests', include: '*/build.gradle')
-fTree.each() { buildFile ->
-  def parent = buildFile.getParentFile().name
-  include("bigtop-tests:spec-tests:$parent")
-}
 


[50/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Updating namespaces to reflect ODPi -> Bigtop transition
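
The renamed keys are ordinary Java system properties, so test code changes only in the constants it reads. A minimal sketch of the pattern, assuming the hive-jdbc driver is on the classpath (the property names come from the JdbcConnector diff below; the class name, default URL, and empty credentials are illustrative only):

    import java.sql.Connection;
    import java.sql.DriverManager;

    public class BigtopHiveJdbcSketch {
        // Property keys after the ODPi -> Bigtop rename (see JdbcConnector.java below)
        private static final String URL = "bigtop.test.hive.jdbc.url";
        private static final String USER = "bigtop.test.hive.jdbc.user";
        private static final String PASSWD = "bigtop.test.hive.jdbc.password";

        public static void main(String[] args) throws Exception {
            // Register the Hive JDBC driver explicitly so the sketch does not rely on JDBC 4 auto-loading
            Class.forName("org.apache.hive.jdbc.HiveDriver");
            // Values are passed on the command line, e.g. -Dbigtop.test.hive.jdbc.url=jdbc:hive2://localhost:10000
            String url = System.getProperty(URL, "jdbc:hive2://localhost:10000");
            try (Connection conn = DriverManager.getConnection(url,
                    System.getProperty(USER, ""), System.getProperty(PASSWD, ""))) {
                System.out.println("Connected to Hive: " + !conn.isClosed());
            }
        }
    }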


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/0f51fb32
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/0f51fb32
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/0f51fb32

Branch: refs/heads/master
Commit: 0f51fb32531daa6c0b66e6bbd8fc9813023019ba
Parents: c66106b
Author: Roman Shaposhnik <rv...@apache.org>
Authored: Wed Mar 22 09:37:47 2017 -0700
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:17 2017 -0700

----------------------------------------------------------------------
 .../smoke-tests/odpi-runtime/build.gradle       |  4 +-
 .../odpi/specs/runtime/hadoop/ApiExaminer.java  |  2 +-
 .../org/odpi/specs/runtime/hive/HCatalogMR.java |  8 +-
 .../src/main/resources/api-examiner-prep.sh     |  4 +-
 .../odpi/specs/runtime/TestSpecsRuntime.groovy  |  6 +-
 .../org/odpi/specs/runtime/hive/HiveHelper.java |  2 +-
 .../odpi/specs/runtime/hive/JdbcConnector.java  | 20 ++--
 .../odpi/specs/runtime/hive/TestBeeline.java    | 55 +++++------
 .../org/odpi/specs/runtime/hive/TestCLI.java    | 96 ++++++++++----------
 .../odpi/specs/runtime/hive/TestHCatalog.java   | 10 +-
 .../org/odpi/specs/runtime/hive/TestJdbc.java   | 10 +-
 .../org/odpi/specs/runtime/hive/TestSql.java    | 36 ++++----
 .../org/odpi/specs/runtime/hive/TestThrift.java | 12 +--
 13 files changed, 133 insertions(+), 132 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/0f51fb32/bigtop-tests/smoke-tests/odpi-runtime/build.gradle
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/build.gradle b/bigtop-tests/smoke-tests/odpi-runtime/build.gradle
index 1b9558c..6132734 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/build.gradle
+++ b/bigtop-tests/smoke-tests/odpi-runtime/build.gradle
@@ -57,7 +57,7 @@ jar {
 test {
   // Change the default location where test data is picked up
   systemProperty 'test.resources.dir', "${buildDir}/resources/test/"
-  systemProperty 'odpi.test.hive.hcat.job.jar', jar.archivePath
-  systemProperty 'odpi.test.hive.hcat.core.jar', (configurations.runtime).find { it.toString() =~ /hive-hcatalog-core-.*jar$/ }
+  systemProperty 'bigtop.test.hive.hcat.job.jar', jar.archivePath
+  systemProperty 'bigtop.test.hive.hcat.core.jar', (configurations.runtime).find { it.toString() =~ /hive-hcatalog-core-.*jar$/ }
 }
 test.dependsOn jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/0f51fb32/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
index 05d0eae..a8febdb 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.odpi.specs.runtime.hadoop;
+package org.apache.bigtop.itest.hadoop.api;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.GnuParser;

http://git-wip-us.apache.org/repos/asf/bigtop/blob/0f51fb32/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
index 2091e34..6456cf2 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.odpi.specs.runtime.hive;
+package org.apache.bigtop.itest.hive;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -41,8 +41,8 @@ import java.net.URI;
 import java.util.StringTokenizer;
 
 public class HCatalogMR extends Configured implements Tool {
-  private final static String INPUT_SCHEMA = "odpi.test.hcat.schema.input";
-  private final static String OUTPUT_SCHEMA = "odpi.test.hcat.schema.output";
+  private final static String INPUT_SCHEMA = "bigtop.test.hcat.schema.input";
+  private final static String OUTPUT_SCHEMA = "bigtop.test.hcat.schema.output";
 
   @Override
   public int run(String[] args) throws Exception {
@@ -68,7 +68,7 @@ public class HCatalogMR extends Configured implements Tool {
     conf.set(INPUT_SCHEMA, inputSchemaStr);
     conf.set(OUTPUT_SCHEMA, outputSchemaStr);
 
-    Job job = new Job(conf, "odpi_hcat_test");
+    Job job = new Job(conf, "bigtop_hcat_test");
     HCatInputFormat.setInput(job, "default", inputTable);
 
     job.setInputFormatClass(HCatInputFormat.class);

http://git-wip-us.apache.org/repos/asf/bigtop/blob/0f51fb32/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh b/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh
index d062f17..db899c8 100755
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh
@@ -40,7 +40,7 @@
 # $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner.sh -j $HADOOP_HOME/share/hadoop/common/hadoop-common-2.7.3.jar -p $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/src/test/resources
 #
 # The resulting reports should be committed to git.  This script only needs
-# to be run once per ODPi release.
+# to be run once per Bigtop release.
 ############################################################################
 
 
@@ -66,5 +66,5 @@ do
     CLASSPATH=$CLASSPATH:$jar
 done
 
-java -cp $CLASSPATH org.odpi.specs.runtime.hadoop.ApiExaminer $@
+java -cp $CLASSPATH org.apache.bigtop.itest.hadoop.api.ApiExaminer $@
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/0f51fb32/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy b/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
index 46bdbed..60d869c 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.odpi.specs.runtime
+package org.apache.bigtop.itest.hadoop.odpi 
 
 import groovy.io.FileType
 import org.junit.Assert
@@ -256,10 +256,10 @@ public class TestSpecsRuntime {
         Assert.assertEquals("Expected only one jar, but got " + jars.join(", "), 1, jars.length)
         def jar = dir.getAbsolutePath() + "/" + jars[0]
 
-        def examinerJar = System.properties['odpi.test.hive.hcat.job.jar']
+        def examinerJar = System.properties['bigtop.test.hive.hcat.job.jar']
         def resourceFile = System.properties['test.resources.dir']+ "/" + arguments['resourceFile']
         Shell sh = new Shell()
-        def results = sh.exec("hadoop jar " + examinerJar + " org.odpi.specs.runtime.hadoop.ApiExaminer -c " + resourceFile + " -j " + jar).getErr()
+        def results = sh.exec("hadoop jar " + examinerJar + " org.apache.bigtop.itest.hadoop.api.ApiExaminer -c " + resourceFile + " -j " + jar).getErr()
         int rc = sh.getRet()
         Assert.assertEquals("Expected command to succeed, but got return code " + rc, 0, rc)
         if (results.size() > 0) {

http://git-wip-us.apache.org/repos/asf/bigtop/blob/0f51fb32/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
index 72d34c7..ee20588 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.odpi.specs.runtime.hive;
+package org.apache.bigtop.itest.hive;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/bigtop/blob/0f51fb32/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
index 095eb54..3b3ac51 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.odpi.specs.runtime.hive;
+package org.apache.bigtop.itest.hive;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -30,15 +30,15 @@ import java.util.Properties;
 public class JdbcConnector {
   private static final Log LOG = LogFactory.getLog(JdbcConnector.class.getName());
 
-  protected static final String URL = "odpi.test.hive.jdbc.url";
-  protected static final String USER = "odpi.test.hive.jdbc.user";
-  protected static final String PASSWD = "odpi.test.hive.jdbc.password";
-  protected static final String LOCATION = "odpi.test.hive.location";
-  protected static final String METASTORE_URL = "odpi.test.hive.metastore.url";
-  protected static final String TEST_THRIFT = "odpi.test.hive.thrift.test";
-  protected static final String TEST_HCATALOG = "odpi.test.hive.hcatalog.test";
-  protected static final String HIVE_CONF_DIR = "odpi.test.hive.conf.dir";
-  protected static final String HADOOP_CONF_DIR = "odpi.test.hadoop.conf.dir";
+  protected static final String URL = "bigtop.test.hive.jdbc.url";
+  protected static final String USER = "bigtop.test.hive.jdbc.user";
+  protected static final String PASSWD = "bigtop.test.hive.jdbc.password";
+  protected static final String LOCATION = "bigtop.test.hive.location";
+  protected static final String METASTORE_URL = "bigtop.test.hive.metastore.url";
+  protected static final String TEST_THRIFT = "bigtop.test.hive.thrift.test";
+  protected static final String TEST_HCATALOG = "bigtop.test.hive.hcatalog.test";
+  protected static final String HIVE_CONF_DIR = "bigtop.test.hive.conf.dir";
+  protected static final String HADOOP_CONF_DIR = "bigtop.test.hadoop.conf.dir";
 
   protected static Connection conn;
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/0f51fb32/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
index 5e6e89c..bc2ab77 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
@@ -15,7 +15,8 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.odpi.specs.runtime.hive;
+package org.apache.bigtop.itest.hive;
+
 import org.apache.commons.exec.CommandLine;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -31,9 +32,9 @@ public class TestBeeline {
 	
 	public static final Log LOG = LogFactory.getLog(TestBeeline.class.getName());
 	
-	private static final String URL = "odpi.test.hive.jdbc.url";
-	private static final String USER = "odpi.test.hive.jdbc.user";
-	private static final String PASSWD = "odpi.test.hive.jdbc.password";
+	private static final String URL = "bigtop.test.hive.jdbc.url";
+	private static final String USER = "bigtop.test.hive.jdbc.user";
+	private static final String PASSWD = "bigtop.test.hive.jdbc.password";
 	
 	private static Map<String, String> results;
 	private static String beelineUrl; 
@@ -94,29 +95,29 @@ public class TestBeeline {
 	@Test
 	public void checkBeelineQueryExecFromCmdLine(){
 		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
+		if(!results.get("outputStream").contains("bigtop_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive;"));
 			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
 		}else{
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive;"));
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive;"));
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive;"));
 			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
 		}
 		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline -e FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+		Assert.assertEquals("beeline -e FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive"));
 	}
 	
 	@Test
 	public void checkBeelineQueryExecFromFile() throws FileNotFoundException{
 		
 		try(PrintWriter out = new PrintWriter("beeline-f1.sql")){ out.println("SHOW DATABASES;"); }
-		try(PrintWriter out = new PrintWriter("beeline-f2.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
-		try(PrintWriter out = new PrintWriter("beeline-f3.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
-		try(PrintWriter out = new PrintWriter("beeline-f4.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("beeline-f2.sql")){ out.println("CREATE DATABASE bigtop_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("beeline-f3.sql")){ out.println("DROP DATABASE bigtop_runtime_hive;"); out.println("CREATE DATABASE bigtop_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("beeline-f4.sql")){ out.println("DROP DATABASE bigtop_runtime_hive;"); }
 		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false));
 		
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+		if(!results.get("outputStream").contains("bigtop_runtime_hive")){
 			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f2.sql",false));
 		}else{
 			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f3.sql",false));
@@ -125,7 +126,7 @@ public class TestBeeline {
 		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false));
 
 		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline -f FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+		Assert.assertEquals("beeline -f FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
 		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f4.sql",false));	
 	}
 	
@@ -133,12 +134,12 @@ public class TestBeeline {
 	public void checkBeelineInitFile() throws FileNotFoundException{
 
 		try(PrintWriter out = new PrintWriter("beeline-i1.sql")){ out.println("SHOW DATABASES;"); }
-		try(PrintWriter out = new PrintWriter("beeline-i2.sql")){ out.println("CREATE DATABASE odpi_runtime_beeline_init;"); }
-		try(PrintWriter out = new PrintWriter("beeline-i3.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); out.println("CREATE DATABASE odpi_runtime_beeline_init;"); }
-		try(PrintWriter out = new PrintWriter("beeline-i4.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); }
+		try(PrintWriter out = new PrintWriter("beeline-i2.sql")){ out.println("CREATE DATABASE bigtop_runtime_beeline_init;"); }
+		try(PrintWriter out = new PrintWriter("beeline-i3.sql")){ out.println("DROP DATABASE bigtop_runtime_beeline_init;"); out.println("CREATE DATABASE bigtop_runtime_beeline_init;"); }
+		try(PrintWriter out = new PrintWriter("beeline-i4.sql")){ out.println("DROP DATABASE bigtop_runtime_beeline_init;"); }
 		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false));
 	
-		if(!results.get("outputStream").contains("odpi_runtime_beeline_init")){
+		if(!results.get("outputStream").contains("bigtop_runtime_beeline_init")){
 			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i2.sql",false));
 		}else{
 			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i3.sql",false));
@@ -146,7 +147,7 @@ public class TestBeeline {
 		
 		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false));
 		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline -i FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_beeline_init") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+		Assert.assertEquals("beeline -i FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_beeline_init") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
 		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i4.sql",false));	
 	}
 	
@@ -157,18 +158,18 @@ public class TestBeeline {
 		try(PrintWriter out = new PrintWriter("beeline-hv2.sql")){ out.println("CREATE DATABASE ${db};"); }
 		try(PrintWriter out = new PrintWriter("beeline-hv3.sql")){ out.println("DROP DATABASE ${db};"); out.println("CREATE DATABASE ${db};"); }
 		try(PrintWriter out = new PrintWriter("beeline-hv4.sql")){ out.println("DROP DATABASE ${db};"); }
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false));
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false));
 	
-		if(!results.get("outputStream").contains("odpi_runtime_beeline_hivevar")){
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv2.sql",false));
+		if(!results.get("outputStream").contains("bigtop_runtime_beeline_hivevar")){
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv2.sql",false));
 		}else{
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv3.sql",false));
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv3.sql",false));
 		}
 		
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false));
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false));
 		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline --hivevar FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_beeline_hivevar") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv4.sql",false));		 
+		Assert.assertEquals("beeline --hivevar FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_beeline_hivevar") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv4.sql",false));		 
 	}
 	
 	@Test

http://git-wip-us.apache.org/repos/asf/bigtop/blob/0f51fb32/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
index 61bb9ac..c55bb92 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.odpi.specs.runtime.hive;
+package org.apache.bigtop.itest.hive;
 
 import java.io.FileNotFoundException;
 import java.io.PrintWriter;
@@ -30,7 +30,7 @@ import org.junit.Assert;
 public class TestCLI {
 	
 	static Map<String, String> results;
-	static String db = "javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=odpi_metastore_db;create=true";
+	static String db = "javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=bigtop_metastore_db;create=true";
 	
 	@BeforeClass
 	public static void setup(){
@@ -57,31 +57,31 @@ public class TestCLI {
 		
 		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
 		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+		if(!results.get("outputStream").contains("bigtop_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
 		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
 		}
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
 	}
 	
 	@Test
 	public void sqlFromFiles() throws FileNotFoundException{
 		try(PrintWriter out = new PrintWriter("hive-f1.sql")){ out.println("SHOW DATABASES;"); }
-		try(PrintWriter out = new PrintWriter("hive-f2.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
-		try(PrintWriter out = new PrintWriter("hive-f3.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
-		try(PrintWriter out = new PrintWriter("hive-f4.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("hive-f2.sql")){ out.println("CREATE DATABASE bigtop_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("hive-f3.sql")){ out.println("DROP DATABASE bigtop_runtime_hive;"); out.println("CREATE DATABASE bigtop_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("hive-f4.sql")){ out.println("DROP DATABASE bigtop_runtime_hive;"); }
 		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f1.sql").addArgument("--hiveconf").addArgument(db));
 		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+		if(!results.get("outputStream").contains("bigtop_runtime_hive")){
 			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f2.sql").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+			Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
 		}else{
 			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f3.sql").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+			Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
 		}
 		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f4.sql").addArgument("--hiveconf").addArgument(db));
 	}
@@ -106,46 +106,46 @@ public class TestCLI {
 	
 	@Test
 	public void initialization() throws FileNotFoundException{
-		try(PrintWriter out = new PrintWriter("hive-init1.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
-		try(PrintWriter out = new PrintWriter("hive-init2.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("hive-init1.sql")){ out.println("CREATE DATABASE bigtop_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("hive-init2.sql")){ out.println("DROP DATABASE bigtop_runtime_hive;"); out.println("CREATE DATABASE bigtop_runtime_hive;"); }
 		
 		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
 		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+		if(!results.get("outputStream").contains("bigtop_runtime_hive")){
 			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init1.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", 0, Integer.parseInt(results.get("exitValue")));
-			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+			Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", 0, Integer.parseInt(results.get("exitValue")));
+			Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
 		}else{
 			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init2.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
-			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+			Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+			Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
 		}
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
 	}
 	
 	@Test
 	public void database(){
 		
 		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		if(!results.get("outputStream").contains("bigtop_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
 		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
 		}
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive_1234").addArgument("-e").addArgument("CREATE TABLE odpi ( MYID INT );").addArgument("--hiveconf").addArgument(db));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive_1234").addArgument("-e").addArgument("CREATE TABLE bigtop ( MYID INT );").addArgument("--hiveconf").addArgument(db));
 		Assert.assertEquals("Non-existent database returned with wrong exit code: "+Integer.parseInt(results.get("exitValue")), 88, Integer.parseInt(results.get("exitValue")));
 		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("CREATE TABLE odpi ( MYID INT );").addArgument("--hiveconf").addArgument(db));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("CREATE TABLE bigtop ( MYID INT );").addArgument("--hiveconf").addArgument(db));
 		Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
 		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("DESCRIBE odpi").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("Failed to get expected column after creating odpi table using --database argument.", true, results.get("outputStream").contains("myid"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("DESCRIBE bigtop").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("Failed to get expected column after creating bigtop table using --database argument.", true, results.get("outputStream").contains("myid"));
 		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("DROP TABLE odpi").addArgument("--hiveconf").addArgument(db));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("DROP TABLE bigtop").addArgument("--hiveconf").addArgument(db));
 		Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
 		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
 	}
 	
 	@Test
@@ -160,49 +160,49 @@ public class TestCLI {
 	@Test
 	public void variableSubsitution() throws FileNotFoundException{
 		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		if(!results.get("outputStream").contains("bigtop_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
 		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
 		}
 		try(PrintWriter out = new PrintWriter("hive-define.sql")){ out.println("show ${A};"); out.println("quit;"); }
 		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive -d A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false));		
 		Assert.assertEquals("The hive -d A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
-		Assert.assertEquals("The hive -d A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		Assert.assertEquals("The hive -d A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
 		
 		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --define A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false));		
 		Assert.assertEquals("The hive --define A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
-		Assert.assertEquals("The hive --define A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		Assert.assertEquals("The hive --define A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
 		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
 	}
 	
 	@Test
 	public void hiveVar() throws FileNotFoundException{
 		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		if(!results.get("outputStream").contains("bigtop_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
 		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
 		}
 		try(PrintWriter out = new PrintWriter("hive-var.sql")){ out.println("show ${A};"); out.println("quit;"); }
 		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hivevar A=DATABASES --hiveconf '"+db+"' < hive-var.sql", false));		
 		Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
-		Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
 		
 		try(PrintWriter out = new PrintWriter("hiveconf-var.sql")){ out.println("show ${hiveconf:A};"); out.println("quit;"); }
 		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hiveconf A=DATABASES --hiveconf '"+db+"' < hiveconf-var.sql", false));		
 		Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
-		Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
 		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
 	}
 	
 	@AfterClass
 	public static void cleanup(){
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
 		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-f*.sql", false));
 		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-init*.sql", false));
 		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-define.sql", false));

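For reference, the HiveHelper.execCommand(...) wrapper used throughout these tests is not part of the hunks shown above. A minimal sketch of what such a helper might look like, assuming it is built on Apache commons-exec and hands back the exit value plus the captured console output in a Map (the class name and error handling here are illustrative; the real HiveHelper in the Bigtop tree may differ):

import java.io.ByteArrayOutputStream;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.PumpStreamHandler;

public class HiveHelperSketch {
  // Runs the given command line, never throws on a non-zero exit code, and
  // returns the exit value and the combined stdout/stderr as strings.
  public static Map<String, String> execCommand(CommandLine cmd) {
    Map<String, String> results = new HashMap<String, String>();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    DefaultExecutor executor = new DefaultExecutor();
    executor.setStreamHandler(new PumpStreamHandler(out));
    executor.setExitValues(null);   // accept any exit value instead of throwing
    int exitValue;
    try {
      exitValue = executor.execute(cmd);
    } catch (Exception e) {         // IOException and friends from commons-exec
      results.put("exitValue", "-1");
      results.put("outputStream", e.getMessage() == null ? "" : e.getMessage());
      return results;
    }
    results.put("exitValue", String.valueOf(exitValue));
    results.put("outputStream", out.toString());
    return results;
  }
}

A call site such as HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES")) would then read results.get("exitValue") and results.get("outputStream"), which is exactly how the assertions above consume it.
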
http://git-wip-us.apache.org/repos/asf/bigtop/blob/0f51fb32/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
index e8c4763..8bf7141 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.odpi.specs.runtime.hive;
+package org.apache.bigtop.itest.hive;
 
 import org.apache.commons.exec.CommandLine;
 import org.apache.commons.logging.Log;
@@ -51,8 +51,8 @@ import java.util.Random;
 
 
 public class TestHCatalog {
-  private static final String JOBJAR = "odpi.test.hive.hcat.job.jar";
-  private static final String HCATCORE = "odpi.test.hive.hcat.core.jar";
+  private static final String JOBJAR = "bigtop.test.hive.hcat.job.jar";
+  private static final String HCATCORE = "bigtop.test.hive.hcat.core.jar";
 
   private static final Log LOG = LogFactory.getLog(TestHCatalog.class.getName());
 
@@ -92,7 +92,7 @@ public class TestHCatalog {
   public void hcatInputFormatOutputFormat() throws TException, IOException, ClassNotFoundException,
       InterruptedException, URISyntaxException {
     // Create a table to write to
-    final String inputTable = "odpi_hcat_input_table_" + rand.nextInt(Integer.MAX_VALUE);
+    final String inputTable = "bigtop_hcat_input_table_" + rand.nextInt(Integer.MAX_VALUE);
     SerDeInfo serde = new SerDeInfo("default_serde",
         conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
     FieldSchema schema = new FieldSchema("line", "string", "");
@@ -106,7 +106,7 @@ public class TestHCatalog {
         new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
     client.createTable(table);
 
-    final String outputTable = "odpi_hcat_output_table_" + rand.nextInt(Integer.MAX_VALUE);
+    final String outputTable = "bigtop_hcat_output_table_" + rand.nextInt(Integer.MAX_VALUE);
     sd = new StorageDescriptor(Arrays.asList(
           new FieldSchema("word", "string", ""),
           new FieldSchema("count", "int", "")),

http://git-wip-us.apache.org/repos/asf/bigtop/blob/0f51fb32/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
index 6a99a17..a5a896a 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.odpi.specs.runtime.hive;
+package org.apache.bigtop.itest.hive;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -295,9 +295,9 @@ public class TestJdbc extends JdbcConnector {
     try (Statement stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
         ResultSet.CONCUR_READ_ONLY)) {
 
-      final String dbName = "odpi_jdbc_test_db";
+      final String dbName = "bigtop_jdbc_test_db";
 
-      final String tableName = "odpi_jdbc_test_table";
+      final String tableName = "bigtop_jdbc_test_table";
       stmt.execute("drop table if exists " + tableName);
 
       stmt.execute("drop database if exists " + dbName + " cascade");
@@ -345,7 +345,7 @@ public class TestJdbc extends JdbcConnector {
     try (Statement stmt = conn.createStatement()) {
       stmt.clearWarnings();
 
-      final String tableName = "odpi_jdbc_statement_test_table";
+      final String tableName = "bigtop_jdbc_statement_test_table";
 
       stmt.execute("drop table if exists " + tableName);
       stmt.execute("create table " + tableName + " (a int, b varchar(32))");
@@ -409,7 +409,7 @@ public class TestJdbc extends JdbcConnector {
 
   @Test
   public void preparedStmtAndResultSet() throws SQLException {
-    final String tableName = "odpi_jdbc_psars_test_table";
+    final String tableName = "bigtop_jdbc_psars_test_table";
     try (Statement stmt = conn.createStatement()) {
       stmt.execute("drop table if exists " + tableName);
       stmt.execute("create table " + tableName + " (bo boolean, ti tinyint, db double, fl float, " +

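The TestJdbc hunks above only rename table and database identifiers; connection handling lives in the JdbcConnector base class and is untouched by this patch. Purely as an illustration, with a placeholder URL and credentials rather than anything read from the suite's configuration, a standalone HiveServer2 JDBC round trip looks roughly like this:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveJdbcSketch {
  public static void main(String[] args) throws Exception {
    Class.forName("org.apache.hive.jdbc.HiveDriver");
    // Placeholder endpoint and credentials; real tests pick these up from configuration.
    String url = "jdbc:hive2://localhost:10000/default";
    try (Connection conn = DriverManager.getConnection(url, "hive", "")) {
      try (Statement stmt = conn.createStatement()) {
        stmt.execute("drop table if exists bigtop_jdbc_sketch");
        stmt.execute("create table bigtop_jdbc_sketch (a int, b varchar(32))");
      }
      // Client-side parameter substitution, as exercised by preparedStmtAndResultSet.
      try (PreparedStatement ps =
               conn.prepareStatement("insert into table bigtop_jdbc_sketch values (?, ?)")) {
        ps.setInt(1, 1);
        ps.setString(2, "hello");
        ps.executeUpdate();
      }
      try (Statement stmt = conn.createStatement();
           ResultSet rs = stmt.executeQuery("select a, b from bigtop_jdbc_sketch")) {
        while (rs.next()) {
          System.out.println(rs.getInt("a") + "\t" + rs.getString("b"));
        }
      }
    }
  }
}
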
http://git-wip-us.apache.org/repos/asf/bigtop/blob/0f51fb32/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
index 99451d1..06af1da 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.odpi.specs.runtime.hive;
+package org.apache.bigtop.itest.hive;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -31,8 +31,8 @@ public class TestSql extends JdbcConnector {
 
   @Test
   public void db() throws SQLException {
-    final String db1 = "odpi_sql_db1";
-    final String db2 = "odpi_sql_db2";
+    final String db1 = "bigtop_sql_db1";
+    final String db2 = "bigtop_sql_db2";
     try (Statement stmt = conn.createStatement()) {
       stmt.execute("drop database if exists " + db1 + " cascade");
 
@@ -56,11 +56,11 @@ public class TestSql extends JdbcConnector {
 
   @Test
   public void table() throws SQLException {
-    final String table1 = "odpi_sql_table1";
-    final String table2 = "odpi_sql_table2";
-    final String table3 = "odpi_sql_table3";
-    final String table4 = "odpi_sql_table4";
-    final String table5 = "odpi_sql_table5";
+    final String table1 = "bigtop_sql_table1";
+    final String table2 = "bigtop_sql_table2";
+    final String table3 = "bigtop_sql_table3";
+    final String table4 = "bigtop_sql_table4";
+    final String table5 = "bigtop_sql_table5";
 
     try (Statement stmt = conn.createStatement()) {
       stmt.execute("drop table if exists " + table1);
@@ -126,7 +126,7 @@ public class TestSql extends JdbcConnector {
 
   @Test
   public void partitionedTable() throws SQLException {
-    final String table1 = "odpi_sql_ptable1";
+    final String table1 = "bigtop_sql_ptable1";
     try (Statement stmt = conn.createStatement()) {
       stmt.execute("drop table if exists " + table1);
 
@@ -157,9 +157,9 @@ public class TestSql extends JdbcConnector {
 
   @Test
   public void view() throws SQLException {
-    final String table1 = "odpi_sql_vtable1";
-    final String view1 = "odpi_sql_view1";
-    final String view2 = "odpi_sql_view2";
+    final String table1 = "bigtop_sql_vtable1";
+    final String view1 = "bigtop_sql_view1";
+    final String view2 = "bigtop_sql_view2";
     try (Statement stmt = conn.createStatement()) {
       stmt.execute("drop table if exists " + table1);
       stmt.execute("drop view if exists " + view1);
@@ -185,8 +185,8 @@ public class TestSql extends JdbcConnector {
 
   @Test
   public void function() throws SQLException {
-    final String func1 = "odpi_sql_func1";
-    final String func2 = "odpi_sql_func2";
+    final String func1 = "bigtop_sql_func1";
+    final String func2 = "bigtop_sql_func2";
     try (Statement stmt = conn.createStatement()) {
       stmt.execute("create temporary function " + func1 +
           " as 'org.apache.hadoop.hive.ql.udf.UDFToInteger'");
@@ -208,8 +208,8 @@ public class TestSql extends JdbcConnector {
   // test machine has access to HDFS and thus the ability to upload a file.
   @Test
   public void insert() throws SQLException {
-    final String table1 = "odpi_insert_table1";
-    final String table2 = "odpi_insert_table2";
+    final String table1 = "bigtop_insert_table1";
+    final String table2 = "bigtop_insert_table2";
     try (Statement stmt = conn.createStatement()) {
       stmt.execute("drop table if exists " + table1);
       stmt.execute("create table " + table1 +
@@ -269,7 +269,7 @@ public class TestSql extends JdbcConnector {
   // This tests CTEs
   @Test
   public void cte() throws SQLException {
-    final String table1 = "odpi_cte_table1";
+    final String table1 = "bigtop_cte_table1";
     try (Statement stmt = conn.createStatement()) {
       stmt.execute("drop table if exists " + table1);
       stmt.execute("create table " + table1 + "(c1 int, c2 varchar(32))");
@@ -284,7 +284,7 @@ public class TestSql extends JdbcConnector {
 
   @Test
   public void select() throws SQLException {
-    final String[] tables = {"odpi_select_table1", "odpi_select_table2"};
+    final String[] tables = {"bigtop_select_table1", "bigtop_select_table2"};
     try (Statement stmt = conn.createStatement()) {
       for (int i = 0; i < tables.length; i++) {
         stmt.execute("drop table if exists " + tables[i]);

http://git-wip-us.apache.org/repos/asf/bigtop/blob/0f51fb32/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
index 58c6595..f54b7e5 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.odpi.specs.runtime.hive;
+package org.apache.bigtop.itest.hive;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -71,7 +71,7 @@ public class TestThrift {
 
   @Test
   public void db() throws TException {
-    final String dbName = "odpi_thrift_db_" + rand.nextInt(Integer.MAX_VALUE);
+    final String dbName = "bigtop_thrift_db_" + rand.nextInt(Integer.MAX_VALUE);
 
     Database db = new Database(dbName, "a db", null, new HashMap<String, String>());
     client.createDatabase(db);
@@ -80,7 +80,7 @@ public class TestThrift {
     db = new Database(db);
     db.getParameters().put("a", "b");
     client.alterDatabase(dbName, db);
-    List<String> alldbs = client.getDatabases("odpi_*");
+    List<String> alldbs = client.getDatabases("bigtop_*");
     Assert.assertNotNull(alldbs);
     Assert.assertTrue(alldbs.size() > 0);
     alldbs = client.getAllDatabases();
@@ -93,7 +93,7 @@ public class TestThrift {
 
   @Test
   public void nonPartitionedTable() throws TException {
-    final String tableName = "odpi_thrift_table_" + rand.nextInt(Integer.MAX_VALUE);
+    final String tableName = "bigtop_thrift_table_" + rand.nextInt(Integer.MAX_VALUE);
 
     // I don't test every operation related to tables, but only those that are frequently used.
     SerDeInfo serde = new SerDeInfo("default_serde",
@@ -115,7 +115,7 @@ public class TestThrift {
     Assert.assertNotNull(tables);
     Assert.assertEquals(1, tables.size());
 
-    List<String> tableNames = client.getTables("default", "odpi_*");
+    List<String> tableNames = client.getTables("default", "bigtop_*");
     Assert.assertNotNull(tableNames);
     Assert.assertTrue(tableNames.size() >= 1);
 
@@ -143,7 +143,7 @@ public class TestThrift {
 
   @Test
   public void partitionedTable() throws TException {
-    final String tableName = "odpi_thrift_partitioned_table_" + rand.nextInt(Integer.MAX_VALUE);
+    final String tableName = "bigtop_thrift_partitioned_table_" + rand.nextInt(Integer.MAX_VALUE);
 
     // I don't test every operation related to tables, but only those that are frequently used.
     SerDeInfo serde = new SerDeInfo("default_serde",

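The TestThrift hunks likewise only rename databases and tables; constructing the metastore client happens outside the lines shown. A minimal standalone sketch against the same metastore API, with thrift://localhost:9083 as an assumed placeholder for whatever endpoint the cluster actually exposes:

import java.util.HashMap;
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Database;

public class MetastoreThriftSketch {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf();
    // Assumed endpoint; normally this is picked up from hive-site.xml on the classpath.
    conf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:9083");
    IMetaStoreClient client = new HiveMetaStoreClient(conf);
    try {
      Database db = new Database("bigtop_thrift_sketch_db", "a db", null,
          new HashMap<String, String>());
      client.createDatabase(db);

      // Pattern matching mirrors client.getDatabases("bigtop_*") in the tests above.
      List<String> dbs = client.getDatabases("bigtop_*");
      System.out.println("matched databases: " + dbs);

      client.dropDatabase("bigtop_thrift_sketch_db");
    } finally {
      client.close();
    }
  }
}
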

[48/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/a05d3813/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
index c55bb92..2341e9b 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
@@ -28,186 +28,209 @@ import org.junit.AfterClass;
 import org.junit.Assert;
 
 public class TestCLI {
-	
-	static Map<String, String> results;
-	static String db = "javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=bigtop_metastore_db;create=true";
-	
-	@BeforeClass
-	public static void setup(){
-		
-		results = HiveHelper.execCommand(new CommandLine("which").addArgument("hive"));
-		Assert.assertEquals("Hive is not in the current path.", 0, Integer.parseInt(results.get("exitValue")));
-	}
-	
-	@Test
-	public void help(){		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-H"));
-		//LOG.info(results.get("exitValue"));
-		Assert.assertEquals("Error in executing 'hive -H'", 2, Integer.parseInt(results.get("exitValue")));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--help"));
-		Assert.assertEquals("Error in executing 'hive --help'", 0, Integer.parseInt(results.get("exitValue")));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-U"));
-		Assert.assertEquals("Unrecognized option should exit 1.", 1, Integer.parseInt(results.get("exitValue")));
-	}
-	 
-	@Test
-	public void sqlFromCmdLine(){
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
-		if(!results.get("outputStream").contains("bigtop_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
-		}
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
-	}
-	
-	@Test
-	public void sqlFromFiles() throws FileNotFoundException{
-		try(PrintWriter out = new PrintWriter("hive-f1.sql")){ out.println("SHOW DATABASES;"); }
-		try(PrintWriter out = new PrintWriter("hive-f2.sql")){ out.println("CREATE DATABASE bigtop_runtime_hive;"); }
-		try(PrintWriter out = new PrintWriter("hive-f3.sql")){ out.println("DROP DATABASE bigtop_runtime_hive;"); out.println("CREATE DATABASE bigtop_runtime_hive;"); }
-		try(PrintWriter out = new PrintWriter("hive-f4.sql")){ out.println("DROP DATABASE bigtop_runtime_hive;"); }
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f1.sql").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
-		if(!results.get("outputStream").contains("bigtop_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f2.sql").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f3.sql").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
-		}
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f4.sql").addArgument("--hiveconf").addArgument(db));
-	}
-	
-	@Test
-	public void silent() {
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-S").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("-S option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--silent").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("--silent option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
-	}
-	
-	@Test
-	public void verbose(){
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-v").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("-v option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--verbose").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("--verbose option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));		
-	}
-	
-	@Test
-	public void initialization() throws FileNotFoundException{
-		try(PrintWriter out = new PrintWriter("hive-init1.sql")){ out.println("CREATE DATABASE bigtop_runtime_hive;"); }
-		try(PrintWriter out = new PrintWriter("hive-init2.sql")){ out.println("DROP DATABASE bigtop_runtime_hive;"); out.println("CREATE DATABASE bigtop_runtime_hive;"); }
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
-		if(!results.get("outputStream").contains("bigtop_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init1.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", 0, Integer.parseInt(results.get("exitValue")));
-			Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init2.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
-			Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
-		}
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
-	}
-	
-	@Test
-	public void database(){
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		if(!results.get("outputStream").contains("bigtop_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		}
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive_1234").addArgument("-e").addArgument("CREATE TABLE bigtop ( MYID INT );").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("Non-existent database returned with wrong exit code: "+Integer.parseInt(results.get("exitValue")), 88, Integer.parseInt(results.get("exitValue")));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("CREATE TABLE bigtop ( MYID INT );").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("DESCRIBE bigtop").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("Failed to get expected column after creating bigtop table using --database argument.", true, results.get("outputStream").contains("myid"));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("DROP TABLE bigtop").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
-	}
-	
-	@Test
-	public void hiveConf(){
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("The --hiveconf option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("The -hiveconf variant option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
-	}
-	
-	@Test
-	public void variableSubsitution() throws FileNotFoundException{
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		if(!results.get("outputStream").contains("bigtop_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		}
-		try(PrintWriter out = new PrintWriter("hive-define.sql")){ out.println("show ${A};"); out.println("quit;"); }
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive -d A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false));		
-		Assert.assertEquals("The hive -d A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
-		Assert.assertEquals("The hive -d A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
-		
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --define A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false));		
-		Assert.assertEquals("The hive --define A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
-		Assert.assertEquals("The hive --define A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
-	}
-	
-	@Test
-	public void hiveVar() throws FileNotFoundException{
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		if(!results.get("outputStream").contains("bigtop_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		}
-		try(PrintWriter out = new PrintWriter("hive-var.sql")){ out.println("show ${A};"); out.println("quit;"); }
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hivevar A=DATABASES --hiveconf '"+db+"' < hive-var.sql", false));		
-		Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
-		Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
-		
-		try(PrintWriter out = new PrintWriter("hiveconf-var.sql")){ out.println("show ${hiveconf:A};"); out.println("quit;"); }
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hiveconf A=DATABASES --hiveconf '"+db+"' < hiveconf-var.sql", false));		
-		Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
-		Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
-	}
-	
-	@AfterClass
-	public static void cleanup(){
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-f*.sql", false));
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-init*.sql", false));
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-define.sql", false));
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-var.sql", false));
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hiveconf-var.sql", false));
-	}
-	 
+
+    static Map<String, String> results;
+    static String db = "javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=bigtop_metastore_db;create=true";
+
+    @BeforeClass
+    public static void setup() {
+
+        results = HiveHelper.execCommand(new CommandLine("which").addArgument("hive"));
+        Assert.assertEquals("Hive is not in the current path.", 0, Integer.parseInt(results.get("exitValue")));
+    }
+
+    @Test
+    public void help() {
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-H"));
+        //LOG.info(results.get("exitValue"));
+        Assert.assertEquals("Error in executing 'hive -H'", 2, Integer.parseInt(results.get("exitValue")));
+
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--help"));
+        Assert.assertEquals("Error in executing 'hive --help'", 0, Integer.parseInt(results.get("exitValue")));
+
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-U"));
+        Assert.assertEquals("Unrecognized option should exit 1.", 1, Integer.parseInt(results.get("exitValue")));
+    }
+
+    @Test
+    public void sqlFromCmdLine() {
+
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+        Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
+        if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+            results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+            Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+        } else {
+            results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+            results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+            Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+        }
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+    }
+
+    @Test
+    public void sqlFromFiles() throws FileNotFoundException {
+        try (PrintWriter out = new PrintWriter("hive-f1.sql")) {
+            out.println("SHOW DATABASES;");
+        }
+        try (PrintWriter out = new PrintWriter("hive-f2.sql")) {
+            out.println("CREATE DATABASE bigtop_runtime_hive;");
+        }
+        try (PrintWriter out = new PrintWriter("hive-f3.sql")) {
+            out.println("DROP DATABASE bigtop_runtime_hive;");
+            out.println("CREATE DATABASE bigtop_runtime_hive;");
+        }
+        try (PrintWriter out = new PrintWriter("hive-f4.sql")) {
+            out.println("DROP DATABASE bigtop_runtime_hive;");
+        }
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f1.sql").addArgument("--hiveconf").addArgument(db));
+        Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
+        if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+            results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f2.sql").addArgument("--hiveconf").addArgument(db));
+            Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+        } else {
+            results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f3.sql").addArgument("--hiveconf").addArgument(db));
+            Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+        }
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f4.sql").addArgument("--hiveconf").addArgument(db));
+    }
+
+    @Test
+    public void silent() {
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-S").addArgument("--hiveconf").addArgument(db));
+        Assert.assertEquals("-S option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
+
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--silent").addArgument("--hiveconf").addArgument(db));
+        Assert.assertEquals("--silent option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
+    }
+
+    @Test
+    public void verbose() {
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-v").addArgument("--hiveconf").addArgument(db));
+        Assert.assertEquals("-v option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));
+
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--verbose").addArgument("--hiveconf").addArgument(db));
+        Assert.assertEquals("--verbose option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));
+    }
+
+    @Test
+    public void initialization() throws FileNotFoundException {
+        try (PrintWriter out = new PrintWriter("hive-init1.sql")) {
+            out.println("CREATE DATABASE bigtop_runtime_hive;");
+        }
+        try (PrintWriter out = new PrintWriter("hive-init2.sql")) {
+            out.println("DROP DATABASE bigtop_runtime_hive;");
+            out.println("CREATE DATABASE bigtop_runtime_hive;");
+        }
+
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+        Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
+        if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+            results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init1.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+            Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", 0, Integer.parseInt(results.get("exitValue")));
+            Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
+        } else {
+            results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init2.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+            Assert.assertEquals("Could not create database bigtop_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+            Assert.assertEquals("Could not create database bigtop_runtime_hive using the init -i option.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
+        }
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+    }
+
+    @Test
+    public void database() {
+
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+        if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+            results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+        } else {
+            results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+            results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+        }
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive_1234").addArgument("-e").addArgument("CREATE TABLE bigtop ( MYID INT );").addArgument("--hiveconf").addArgument(db));
+        Assert.assertEquals("Non-existent database returned with wrong exit code: " + Integer.parseInt(results.get("exitValue")), 88, Integer.parseInt(results.get("exitValue")));
+
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("CREATE TABLE bigtop ( MYID INT );").addArgument("--hiveconf").addArgument(db));
+        Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
+
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("DESCRIBE bigtop").addArgument("--hiveconf").addArgument(db));
+        Assert.assertEquals("Failed to get expected column after creating bigtop table using --database argument.", true, results.get("outputStream").contains("myid"));
+
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("bigtop_runtime_hive").addArgument("-e").addArgument("DROP TABLE bigtop").addArgument("--hiveconf").addArgument(db));
+        Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
+
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+    }
+
+    @Test
+    public void hiveConf() {
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+        Assert.assertEquals("The --hiveconf option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
+
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+        Assert.assertEquals("The -hiveconf variant option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
+    }
+
+    @Test
+    public void variableSubstitution() throws FileNotFoundException {
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+        if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+            results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+        } else {
+            results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+            results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+        }
+        try (PrintWriter out = new PrintWriter("hive-define.sql")) {
+            out.println("show ${A};");
+            out.println("quit;");
+        }
+        results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive -d A=DATABASES --hiveconf '" + db + "' < hive-define.sql", false));
+        Assert.assertEquals("The hive -d A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+        Assert.assertEquals("The hive -d A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
+
+        results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --define A=DATABASES --hiveconf '" + db + "' < hive-define.sql", false));
+        Assert.assertEquals("The hive --define A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+        Assert.assertEquals("The hive --define A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
+
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+    }
+
+    @Test
+    public void hiveVar() throws FileNotFoundException {
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+        if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+            results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+        } else {
+            results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+            results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+        }
+        try (PrintWriter out = new PrintWriter("hive-var.sql")) {
+            out.println("show ${A};");
+            out.println("quit;");
+        }
+        results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hivevar A=DATABASES --hiveconf '" + db + "' < hive-var.sql", false));
+        Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+        Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
+
+        try (PrintWriter out = new PrintWriter("hiveconf-var.sql")) {
+            out.println("show ${hiveconf:A};");
+            out.println("quit;");
+        }
+        results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hiveconf A=DATABASES --hiveconf '" + db + "' < hiveconf-var.sql", false));
+        Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+        Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", true, results.get("outputStream").contains("bigtop_runtime_hive"));
+
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+    }
+
+    @AfterClass
+    public static void cleanup() {
+        results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive").addArgument("--hiveconf").addArgument(db));
+        results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-f*.sql", false));
+        results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-init*.sql", false));
+        results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-define.sql", false));
+        results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-var.sql", false));
+        results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hiveconf-var.sql", false));
+    }
+
 }
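
A note for readers tracing the CLI assertions above: every test funnels through HiveHelper.execCommand(...) and then inspects the "exitValue" and "outputStream" entries of the returned map. The real HiveHelper.java ships elsewhere in this patch series and is not reproduced in this mail, so the following is only a minimal sketch of the assumed contract, built on Apache Commons Exec (the same CommandLine class the tests construct); the class name and the single-argument signature here are illustrative assumptions, not the shipped code.

    import java.io.ByteArrayOutputStream;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.commons.exec.CommandLine;
    import org.apache.commons.exec.DefaultExecutor;
    import org.apache.commons.exec.PumpStreamHandler;

    // Hedged sketch of the assumed HiveHelper contract; the real helper may differ.
    public class HiveHelperSketch {
        public static Map<String, String> execCommand(CommandLine cmd) {
            Map<String, String> results = new HashMap<>();
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            DefaultExecutor executor = new DefaultExecutor();
            // Capture stdout and stderr together, matching the tests' single "outputStream" key.
            executor.setStreamHandler(new PumpStreamHandler(out, out));
            // Accept any exit code so callers can assert on it themselves (e.g. the 88 case above).
            executor.setExitValues(null);
            int exitValue;
            try {
                exitValue = executor.execute(cmd);
            } catch (Exception e) {
                exitValue = -1;
                results.put("exception", e.getMessage());
            }
            results.put("exitValue", Integer.toString(exitValue));
            results.put("outputStream", out.toString());
            return results;
        }
    }

With a contract like that, an assertion such as Assert.assertEquals(..., 0, Integer.parseInt(results.get("exitValue"))) is simply checking the exit status of the spawned hive process, and the contains(...) checks scan its combined console output.
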

http://git-wip-us.apache.org/repos/asf/bigtop/blob/a05d3813/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
index 8bf7141..bb4287f 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
@@ -51,108 +51,108 @@ import java.util.Random;
 
 
 public class TestHCatalog {
-  private static final String JOBJAR = "bigtop.test.hive.hcat.job.jar";
-  private static final String HCATCORE = "bigtop.test.hive.hcat.core.jar";
-
-  private static final Log LOG = LogFactory.getLog(TestHCatalog.class.getName());
-
-  private static IMetaStoreClient client = null;
-  private static HiveConf conf;
-  private static HCatSchema inputSchema;
-  private static HCatSchema outputSchema;
-
-  private Random rand;
-
-  @BeforeClass
-  public static void connect() throws MetaException {
-    if (JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog ")) {
-      String hiveConfDir = JdbcConnector.getProperty(JdbcConnector.HIVE_CONF_DIR,
-          "Hive conf directory ");
-      String hadoopConfDir = JdbcConnector.getProperty(JdbcConnector.HADOOP_CONF_DIR,
-          "Hadoop conf directory ");
-      conf = new HiveConf();
-      String fileSep = System.getProperty("file.separator");
-      conf.addResource(new Path(hadoopConfDir + fileSep + "core-site.xml"));
-      conf.addResource(new Path(hadoopConfDir + fileSep + "hdfs-site.xml"));
-      conf.addResource(new Path(hadoopConfDir + fileSep + "yarn-site.xml"));
-      conf.addResource(new Path(hadoopConfDir + fileSep + "mapred-site.xml"));
-      conf.addResource(new Path(hiveConfDir + fileSep + "hive-site.xml"));
-      client = new HiveMetaStoreClient(conf);
+    private static final String JOBJAR = "bigtop.test.hive.hcat.job.jar";
+    private static final String HCATCORE = "bigtop.test.hive.hcat.core.jar";
+
+    private static final Log LOG = LogFactory.getLog(TestHCatalog.class.getName());
+
+    private static IMetaStoreClient client = null;
+    private static HiveConf conf;
+    private static HCatSchema inputSchema;
+    private static HCatSchema outputSchema;
+
+    private Random rand;
+
+    @BeforeClass
+    public static void connect() throws MetaException {
+        if (JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog ")) {
+            String hiveConfDir = JdbcConnector.getProperty(JdbcConnector.HIVE_CONF_DIR,
+                    "Hive conf directory ");
+            String hadoopConfDir = JdbcConnector.getProperty(JdbcConnector.HADOOP_CONF_DIR,
+                    "Hadoop conf directory ");
+            conf = new HiveConf();
+            String fileSep = System.getProperty("file.separator");
+            conf.addResource(new Path(hadoopConfDir + fileSep + "core-site.xml"));
+            conf.addResource(new Path(hadoopConfDir + fileSep + "hdfs-site.xml"));
+            conf.addResource(new Path(hadoopConfDir + fileSep + "yarn-site.xml"));
+            conf.addResource(new Path(hadoopConfDir + fileSep + "mapred-site.xml"));
+            conf.addResource(new Path(hiveConfDir + fileSep + "hive-site.xml"));
+            client = new HiveMetaStoreClient(conf);
+
+        }
+    }
+
+    @Before
+    public void checkIfActive() {
+        Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog "));
+        rand = new Random();
+    }
 
+    @Test
+    public void hcatInputFormatOutputFormat() throws TException, IOException, ClassNotFoundException,
+            InterruptedException, URISyntaxException {
+        // Create a table to write to
+        final String inputTable = "bigtop_hcat_input_table_" + rand.nextInt(Integer.MAX_VALUE);
+        SerDeInfo serde = new SerDeInfo("default_serde",
+                conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
+        FieldSchema schema = new FieldSchema("line", "string", "");
+        inputSchema = new HCatSchema(Collections.singletonList(new HCatFieldSchema(schema.getName(),
+                HCatFieldSchema.Type.STRING, schema.getComment())));
+        StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(schema), null,
+                "org.apache.hadoop.mapred.TextInputFormat",
+                "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null,
+                new HashMap<String, String>());
+        Table table = new Table(inputTable, "default", "me", 0, 0, 0, sd, null,
+                new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+        client.createTable(table);
+
+        final String outputTable = "bigtop_hcat_output_table_" + rand.nextInt(Integer.MAX_VALUE);
+        sd = new StorageDescriptor(Arrays.asList(
+                new FieldSchema("word", "string", ""),
+                new FieldSchema("count", "int", "")),
+                null, "org.apache.hadoop.mapred.TextInputFormat",
+                "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null,
+                new HashMap<String, String>());
+        table = new Table(outputTable, "default", "me", 0, 0, 0, sd, null,
+                new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+        client.createTable(table);
+        outputSchema = new HCatSchema(Arrays.asList(
+                new HCatFieldSchema("word", HCatFieldSchema.Type.STRING, ""),
+                new HCatFieldSchema("count", HCatFieldSchema.Type.INT, "")));
+
+        // LATER Could I use HCatWriter here and the reader to read it?
+        // Write some stuff into a file in the location of the table
+        table = client.getTable("default", inputTable);
+        String inputFile = table.getSd().getLocation() + "/input";
+        Path inputPath = new Path(inputFile);
+        FileSystem fs = FileSystem.get(conf);
+        FSDataOutputStream out = fs.create(inputPath);
+        out.writeChars("Mary had a little lamb\n");
+        out.writeChars("its fleece was white as snow\n");
+        out.writeChars("and everywhere that Mary went\n");
+        out.writeChars("the lamb was sure to go\n");
+        out.close();
+
+        Map<String, String> env = new HashMap<>();
+        env.put("HADOOP_CLASSPATH", System.getProperty(HCATCORE, ""));
+        Map<String, String> results = HiveHelper.execCommand(new CommandLine("hive")
+                .addArgument("--service")
+                .addArgument("jar")
+                .addArgument(System.getProperty(JOBJAR))
+                .addArgument(HCatalogMR.class.getName())
+                .addArgument("-it")
+                .addArgument(inputTable)
+                .addArgument("-ot")
+                .addArgument(outputTable)
+                .addArgument("-is")
+                .addArgument(inputSchema.getSchemaAsTypeString())
+                .addArgument("-os")
+                .addArgument(outputSchema.getSchemaAsTypeString()), env);
+        LOG.info(results.toString());
+        Assert.assertEquals("HCat job failed", 0, Integer.parseInt(results.get("exitValue")));
+
+        client.dropTable("default", inputTable);
+        client.dropTable("default", outputTable);
     }
-  }
-
-  @Before
-  public void checkIfActive() {
-    Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog "));
-    rand = new Random();
-  }
-
-  @Test
-  public void hcatInputFormatOutputFormat() throws TException, IOException, ClassNotFoundException,
-      InterruptedException, URISyntaxException {
-    // Create a table to write to
-    final String inputTable = "bigtop_hcat_input_table_" + rand.nextInt(Integer.MAX_VALUE);
-    SerDeInfo serde = new SerDeInfo("default_serde",
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
-    FieldSchema schema = new FieldSchema("line", "string", "");
-    inputSchema = new HCatSchema(Collections.singletonList(new HCatFieldSchema(schema.getName(),
-        HCatFieldSchema.Type.STRING, schema.getComment())));
-    StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(schema), null,
-        "org.apache.hadoop.mapred.TextInputFormat",
-        "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null,
-        new HashMap<String, String>());
-    Table table = new Table(inputTable, "default", "me", 0, 0, 0, sd, null,
-        new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
-    client.createTable(table);
-
-    final String outputTable = "bigtop_hcat_output_table_" + rand.nextInt(Integer.MAX_VALUE);
-    sd = new StorageDescriptor(Arrays.asList(
-          new FieldSchema("word", "string", ""),
-          new FieldSchema("count", "int", "")),
-        null, "org.apache.hadoop.mapred.TextInputFormat",
-        "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null,
-        new HashMap<String, String>());
-    table = new Table(outputTable, "default", "me", 0, 0, 0, sd, null,
-        new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
-    client.createTable(table);
-    outputSchema = new HCatSchema(Arrays.asList(
-        new HCatFieldSchema("word", HCatFieldSchema.Type.STRING, ""),
-        new HCatFieldSchema("count", HCatFieldSchema.Type.INT, "")));
-
-    // LATER Could I use HCatWriter here and the reader to read it?
-    // Write some stuff into a file in the location of the table
-    table = client.getTable("default", inputTable);
-    String inputFile = table.getSd().getLocation() + "/input";
-    Path inputPath = new Path(inputFile);
-    FileSystem fs = FileSystem.get(conf);
-    FSDataOutputStream out = fs.create(inputPath);
-    out.writeChars("Mary had a little lamb\n");
-    out.writeChars("its fleece was white as snow\n");
-    out.writeChars("and everywhere that Mary went\n");
-    out.writeChars("the lamb was sure to go\n");
-    out.close();
-
-    Map<String, String> env = new HashMap<>();
-    env.put("HADOOP_CLASSPATH", System.getProperty(HCATCORE, ""));
-    Map<String, String> results = HiveHelper.execCommand(new CommandLine("hive")
-        .addArgument("--service")
-        .addArgument("jar")
-        .addArgument(System.getProperty(JOBJAR))
-        .addArgument(HCatalogMR.class.getName())
-        .addArgument("-it")
-        .addArgument(inputTable)
-        .addArgument("-ot")
-        .addArgument(outputTable)
-        .addArgument("-is")
-        .addArgument(inputSchema.getSchemaAsTypeString())
-        .addArgument("-os")
-        .addArgument(outputSchema.getSchemaAsTypeString()), env);
-    LOG.info(results.toString());
-    Assert.assertEquals("HCat job failed", 0, Integer.parseInt(results.get("exitValue")));
-
-    client.dropTable("default", inputTable);
-    client.dropTable("default", outputTable);
-  }
 
 }
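
The hcatInputFormatOutputFormat test above delegates the real work to a separate job jar (HCatalogMR, listed later in this series) that it launches through hive --service jar with -it/-ot/-is/-os arguments. That driver is not shown in this mail, so the sketch below only illustrates what a word-count style job over HCatInputFormat/HCatOutputFormat generally looks like; the class name, the positional argument handling, and the hard-coded "default" database are assumptions made for brevity, not the code shipped with this patch.

    import java.io.IOException;
    import java.util.StringTokenizer;

    import org.apache.hadoop.conf.Configured;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.WritableComparable;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.Reducer;
    import org.apache.hadoop.util.Tool;
    import org.apache.hadoop.util.ToolRunner;
    import org.apache.hive.hcatalog.data.DefaultHCatRecord;
    import org.apache.hive.hcatalog.data.HCatRecord;
    import org.apache.hive.hcatalog.data.schema.HCatSchema;
    import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils;
    import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
    import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
    import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;

    // Hypothetical stand-in for the HCatalogMR driver referenced by the test above.
    public class HCatalogMRSketch extends Configured implements Tool {

        public static class WordMapper extends Mapper<WritableComparable, HCatRecord, Text, IntWritable> {
            private static final IntWritable ONE = new IntWritable(1);

            @Override
            protected void map(WritableComparable key, HCatRecord value, Context ctx)
                    throws IOException, InterruptedException {
                // Column 0 is the single "line" string column of the input table.
                StringTokenizer tok = new StringTokenizer(value.get(0).toString());
                while (tok.hasMoreTokens()) ctx.write(new Text(tok.nextToken()), ONE);
            }
        }

        public static class WordReducer extends Reducer<Text, IntWritable, WritableComparable, HCatRecord> {
            @Override
            protected void reduce(Text word, Iterable<IntWritable> counts, Context ctx)
                    throws IOException, InterruptedException {
                int sum = 0;
                for (IntWritable c : counts) sum += c.get();
                DefaultHCatRecord record = new DefaultHCatRecord(2);  // (word string, count int)
                record.set(0, word.toString());
                record.set(1, sum);
                ctx.write(null, record);
            }
        }

        @Override
        public int run(String[] args) throws Exception {
            // For brevity this sketch takes positional arguments instead of the -it/-ot/-is/-os flags.
            String inputTable = args[0], outputTable = args[1], outputSchemaStr = args[2];

            Job job = Job.getInstance(getConf(), "hcat-wordcount-sketch");
            job.setJarByClass(HCatalogMRSketch.class);

            // Read rows of the input table through HCatalog rather than raw files.
            HCatInputFormat.setInput(job, "default", inputTable);
            job.setInputFormatClass(HCatInputFormat.class);

            // Write (word, count) records into the output table with the schema passed on the command line.
            HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", outputTable, null));
            HCatSchema outputSchema = HCatSchemaUtils.getHCatSchema(outputSchemaStr);
            HCatOutputFormat.setSchema(job, outputSchema);
            job.setOutputFormatClass(HCatOutputFormat.class);

            job.setMapperClass(WordMapper.class);
            job.setReducerClass(WordReducer.class);
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(IntWritable.class);
            job.setOutputKeyClass(WritableComparable.class);
            job.setOutputValueClass(DefaultHCatRecord.class);

            return job.waitForCompletion(true) ? 0 : 1;
        }

        public static void main(String[] args) throws Exception {
            System.exit(ToolRunner.run(new HCatalogMRSketch(), args));
        }
    }
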

http://git-wip-us.apache.org/repos/asf/bigtop/blob/a05d3813/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
index a5a896a..6356640 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
@@ -32,514 +32,516 @@ import java.sql.Statement;
 import java.sql.Types;
 
 public class TestJdbc extends JdbcConnector {
-  private static final Log LOG = LogFactory.getLog(TestJdbc.class.getName());
-
-  /**
-   * Test simple non-statement related class.  setSchema is tested elsewhere because there's work
-   * to do for that one.  Similarly with getMetadata.
-   * @throws SQLException
-   */
-  @Test
-  public void nonStatementCalls() throws SQLException {
-    conn.clearWarnings();
-
-    boolean isAutoCommit = conn.getAutoCommit();
-    LOG.debug("Auto commit is " + isAutoCommit);
-
-    String catalog = conn.getCatalog();
-    LOG.debug("Catalog is " + catalog);
-
-    String schema = conn.getSchema();
-    LOG.debug("Schema is " + schema);
-
-    int txnIsolation = conn.getTransactionIsolation();
-    LOG.debug("Transaction Isolation is " + txnIsolation);
-
-    SQLWarning warning = conn.getWarnings();
-    while (warning != null) {
-      LOG.debug("Found a warning: " + warning.getMessage());
-      warning = warning.getNextWarning();
+    private static final Log LOG = LogFactory.getLog(TestJdbc.class.getName());
+
+    /**
+     * Test simple non-statement related class.  setSchema is tested elsewhere because there's work
+     * to do for that one.  Similarly with getMetadata.
+     *
+     * @throws SQLException
+     */
+    @Test
+    public void nonStatementCalls() throws SQLException {
+        conn.clearWarnings();
+
+        boolean isAutoCommit = conn.getAutoCommit();
+        LOG.debug("Auto commit is " + isAutoCommit);
+
+        String catalog = conn.getCatalog();
+        LOG.debug("Catalog is " + catalog);
+
+        String schema = conn.getSchema();
+        LOG.debug("Schema is " + schema);
+
+        int txnIsolation = conn.getTransactionIsolation();
+        LOG.debug("Transaction Isolation is " + txnIsolation);
+
+        SQLWarning warning = conn.getWarnings();
+        while (warning != null) {
+            LOG.debug("Found a warning: " + warning.getMessage());
+            warning = warning.getNextWarning();
+        }
+
+        boolean closed = conn.isClosed();
+        LOG.debug("Is closed? " + closed);
+
+        boolean readOnly = conn.isReadOnly();
+        LOG.debug("Is read only?" + readOnly);
+
+        // Hive doesn't support catalogs, so setting this to whatever should be fine.  If we have
+        // non-Hive systems trying to pass this setting it to a non-valid catalog name may cause
+        // issues, so we may need to make this value configurable or something.
+        conn.setCatalog("fred");
     }
 
-    boolean closed = conn.isClosed();
-    LOG.debug("Is closed? " + closed);
+    /**
+     * Test simple DatabaseMetaData calls.  getColumns is tested elsewhere, as we need to call
+     * that on a valid table.  Same with getFunctions.
+     *
+     * @throws SQLException
+     */
+    @Test
+    public void databaseMetaDataCalls() throws SQLException {
+        DatabaseMetaData md = conn.getMetaData();
 
-    boolean readOnly = conn.isReadOnly();
-    LOG.debug("Is read only?" + readOnly);
+        boolean boolrc = md.allTablesAreSelectable();
+        LOG.debug("All tables are selectable? " + boolrc);
 
-    // Hive doesn't support catalogs, so setting this to whatever should be fine.  If we have
-    // non-Hive systems trying to pass this setting it to a non-valid catalog name may cause
-    // issues, so we may need to make this value configurable or something.
-    conn.setCatalog("fred");
-  }
-
-  /**
-   * Test simple DatabaseMetaData calls.  getColumns is tested elsewhere, as we need to call
-   * that on a valid table.  Same with getFunctions.
-   * @throws SQLException
-   */
-  @Test
-  public void databaseMetaDataCalls() throws SQLException {
-    DatabaseMetaData md = conn.getMetaData();
-
-    boolean boolrc = md.allTablesAreSelectable();
-    LOG.debug("All tables are selectable? " + boolrc);
-
-    String strrc = md.getCatalogSeparator();
-    LOG.debug("Catalog separator " + strrc);
-
-    strrc = md.getCatalogTerm();
-    LOG.debug("Catalog term " + strrc);
-
-    ResultSet rs = md.getCatalogs();
-    while (rs.next()) {
-      strrc = rs.getString(1);
-      LOG.debug("Found catalog " + strrc);
-    }
+        String strrc = md.getCatalogSeparator();
+        LOG.debug("Catalog separator " + strrc);
 
-    Connection c = md.getConnection();
+        strrc = md.getCatalogTerm();
+        LOG.debug("Catalog term " + strrc);
 
-    int intrc = md.getDatabaseMajorVersion();
-    LOG.debug("DB major version is " + intrc);
+        ResultSet rs = md.getCatalogs();
+        while (rs.next()) {
+            strrc = rs.getString(1);
+            LOG.debug("Found catalog " + strrc);
+        }
 
-    intrc = md.getDatabaseMinorVersion();
-    LOG.debug("DB minor version is " + intrc);
+        Connection c = md.getConnection();
 
-    strrc = md.getDatabaseProductName();
-    LOG.debug("DB product name is " + strrc);
+        int intrc = md.getDatabaseMajorVersion();
+        LOG.debug("DB major version is " + intrc);
 
-    strrc = md.getDatabaseProductVersion();
-    LOG.debug("DB product version is " + strrc);
+        intrc = md.getDatabaseMinorVersion();
+        LOG.debug("DB minor version is " + intrc);
 
-    intrc = md.getDefaultTransactionIsolation();
-    LOG.debug("Default transaction isolation is " + intrc);
+        strrc = md.getDatabaseProductName();
+        LOG.debug("DB product name is " + strrc);
 
-    intrc = md.getDriverMajorVersion();
-    LOG.debug("Driver major version is " + intrc);
+        strrc = md.getDatabaseProductVersion();
+        LOG.debug("DB product version is " + strrc);
 
-    intrc = md.getDriverMinorVersion();
-    LOG.debug("Driver minor version is " + intrc);
+        intrc = md.getDefaultTransactionIsolation();
+        LOG.debug("Default transaction isolation is " + intrc);
 
-    strrc = md.getDriverName();
-    LOG.debug("Driver name is " + strrc);
+        intrc = md.getDriverMajorVersion();
+        LOG.debug("Driver major version is " + intrc);
 
-    strrc = md.getDriverVersion();
-    LOG.debug("Driver version is " + strrc);
+        intrc = md.getDriverMinorVersion();
+        LOG.debug("Driver minor version is " + intrc);
 
-    strrc = md.getExtraNameCharacters();
-    LOG.debug("Extra name characters is " + strrc);
+        strrc = md.getDriverName();
+        LOG.debug("Driver name is " + strrc);
 
-    strrc = md.getIdentifierQuoteString();
-    LOG.debug("Identifier quote string is " + strrc);
+        strrc = md.getDriverVersion();
+        LOG.debug("Driver version is " + strrc);
 
-    // In Hive 1.2 this always returns an empty RS
-    rs = md.getImportedKeys("a", "b", "d");
+        strrc = md.getExtraNameCharacters();
+        LOG.debug("Extra name characters is " + strrc);
 
-    // In Hive 1.2 this always returns an empty RS
-    rs = md.getIndexInfo("a", "b", "d", true, true);
+        strrc = md.getIdentifierQuoteString();
+        LOG.debug("Identifier quote string is " + strrc);
 
-    intrc = md.getJDBCMajorVersion();
-    LOG.debug("JDBC major version is " + intrc);
+        // In Hive 1.2 this always returns an empty RS
+        rs = md.getImportedKeys("a", "b", "d");
 
-    intrc = md.getJDBCMinorVersion();
-    LOG.debug("JDBC minor version is " + intrc);
+        // In Hive 1.2 this always returns an empty RS
+        rs = md.getIndexInfo("a", "b", "d", true, true);
 
-    intrc = md.getMaxColumnNameLength();
-    LOG.debug("Maximum column name length is " + intrc);
+        intrc = md.getJDBCMajorVersion();
+        LOG.debug("JDBC major version is " + intrc);
 
-    strrc = md.getNumericFunctions();
-    LOG.debug("Numeric functions are " + strrc);
+        intrc = md.getJDBCMinorVersion();
+        LOG.debug("JDBC minor version is " + intrc);
 
-    // In Hive 1.2 this always returns an empty RS
-    rs = md.getPrimaryKeys("a", "b", "d");
+        intrc = md.getMaxColumnNameLength();
+        LOG.debug("Maximum column name length is " + intrc);
 
-    // In Hive 1.2 this always returns an empty RS
-    rs = md.getProcedureColumns("a", "b", "d", "e");
+        strrc = md.getNumericFunctions();
+        LOG.debug("Numeric functions are " + strrc);
 
-    strrc = md.getProcedureTerm();
-    LOG.debug("Procedures are called " + strrc);
+        // In Hive 1.2 this always returns an empty RS
+        rs = md.getPrimaryKeys("a", "b", "d");
 
-    // In Hive 1.2 this always returns an empty RS
-    rs = md.getProcedures("a", "b", "d");
-
-    strrc = md.getSchemaTerm();
-    LOG.debug("Schemas are called " + strrc);
-
-    rs = md.getSchemas();
-    while (rs.next()) {
-      strrc = rs.getString(1);
-      LOG.debug("Found schema " + strrc);
-    }
-
-    strrc = md.getSearchStringEscape();
-    LOG.debug("Search string escape is " + strrc);
-
-    strrc = md.getStringFunctions();
-    LOG.debug("String functions are " + strrc);
-
-    strrc = md.getSystemFunctions();
-    LOG.debug("System functions are " + strrc);
-
-    rs = md.getTableTypes();
-    while (rs.next()) {
-      strrc = rs.getString(1);
-      LOG.debug("Found table type " + strrc);
-    }
-
-    strrc = md.getTimeDateFunctions();
-    LOG.debug("Time/date functions are " + strrc);
-
-    rs = md.getTypeInfo();
-    while (rs.next()) {
-      strrc = rs.getString(1);
-      LOG.debug("Found type " + strrc);
-    }
+        // In Hive 1.2 this always returns an empty RS
+        rs = md.getProcedureColumns("a", "b", "d", "e");
 
-    // In Hive 1.2 this always returns an empty RS
-    rs = md.getUDTs("a", "b", "d", null);
+        strrc = md.getProcedureTerm();
+        LOG.debug("Procedures are called " + strrc);
 
-    boolrc = md.supportsAlterTableWithAddColumn();
-    LOG.debug("Supports alter table with add column? " + boolrc);
+        // In Hive 1.2 this always returns an empty RS
+        rs = md.getProcedures("a", "b", "d");
 
-    boolrc = md.supportsAlterTableWithDropColumn();
-    LOG.debug("Supports alter table with drop column? " + boolrc);
+        strrc = md.getSchemaTerm();
+        LOG.debug("Schemas are called " + strrc);
 
-    boolrc = md.supportsBatchUpdates();
-    LOG.debug("Supports batch updates? " + boolrc);
+        rs = md.getSchemas();
+        while (rs.next()) {
+            strrc = rs.getString(1);
+            LOG.debug("Found schema " + strrc);
+        }
 
-    boolrc = md.supportsCatalogsInDataManipulation();
-    LOG.debug("Supports catalogs in data manipulation? " + boolrc);
+        strrc = md.getSearchStringEscape();
+        LOG.debug("Search string escape is " + strrc);
 
-    boolrc = md.supportsCatalogsInIndexDefinitions();
-    LOG.debug("Supports catalogs in index definition? " + boolrc);
+        strrc = md.getStringFunctions();
+        LOG.debug("String functions are " + strrc);
 
-    boolrc = md.supportsCatalogsInPrivilegeDefinitions();
-    LOG.debug("Supports catalogs in privilege definition? " + boolrc);
+        strrc = md.getSystemFunctions();
+        LOG.debug("System functions are " + strrc);
 
-    boolrc = md.supportsCatalogsInProcedureCalls();
-    LOG.debug("Supports catalogs in procedure calls? " + boolrc);
+        rs = md.getTableTypes();
+        while (rs.next()) {
+            strrc = rs.getString(1);
+            LOG.debug("Found table type " + strrc);
+        }
 
-    boolrc = md.supportsCatalogsInTableDefinitions();
-    LOG.debug("Supports catalogs in table definition? " + boolrc);
+        strrc = md.getTimeDateFunctions();
+        LOG.debug("Time/date functions are " + strrc);
 
-    boolrc = md.supportsColumnAliasing();
-    LOG.debug("Supports column aliasing? " + boolrc);
+        rs = md.getTypeInfo();
+        while (rs.next()) {
+            strrc = rs.getString(1);
+            LOG.debug("Found type " + strrc);
+        }
 
-    boolrc = md.supportsFullOuterJoins();
-    LOG.debug("Supports full outer joins? " + boolrc);
+        // In Hive 1.2 this always returns an empty RS
+        rs = md.getUDTs("a", "b", "d", null);
 
-    boolrc = md.supportsGroupBy();
-    LOG.debug("Supports group by? " + boolrc);
+        boolrc = md.supportsAlterTableWithAddColumn();
+        LOG.debug("Supports alter table with add column? " + boolrc);
 
-    boolrc = md.supportsLimitedOuterJoins();
-    LOG.debug("Supports limited outer joins? " + boolrc);
+        boolrc = md.supportsAlterTableWithDropColumn();
+        LOG.debug("Supports alter table with drop column? " + boolrc);
 
-    boolrc = md.supportsMultipleResultSets();
-    LOG.debug("Supports limited outer joins? " + boolrc);
+        boolrc = md.supportsBatchUpdates();
+        LOG.debug("Supports batch updates? " + boolrc);
 
-    boolrc = md.supportsNonNullableColumns();
-    LOG.debug("Supports non-nullable columns? " + boolrc);
+        boolrc = md.supportsCatalogsInDataManipulation();
+        LOG.debug("Supports catalogs in data manipulation? " + boolrc);
 
-    boolrc = md.supportsOuterJoins();
-    LOG.debug("Supports outer joins? " + boolrc);
+        boolrc = md.supportsCatalogsInIndexDefinitions();
+        LOG.debug("Supports catalogs in index definition? " + boolrc);
 
-    boolrc = md.supportsPositionedDelete();
-    LOG.debug("Supports positioned delete? " + boolrc);
+        boolrc = md.supportsCatalogsInPrivilegeDefinitions();
+        LOG.debug("Supports catalogs in privilege definition? " + boolrc);
 
-    boolrc = md.supportsPositionedUpdate();
-    LOG.debug("Supports positioned update? " + boolrc);
+        boolrc = md.supportsCatalogsInProcedureCalls();
+        LOG.debug("Supports catalogs in procedure calls? " + boolrc);
 
-    boolrc = md.supportsResultSetHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
-    LOG.debug("Supports result set holdability? " + boolrc);
+        boolrc = md.supportsCatalogsInTableDefinitions();
+        LOG.debug("Supports catalogs in table definition? " + boolrc);
 
-    boolrc = md.supportsResultSetType(ResultSet.HOLD_CURSORS_OVER_COMMIT);
-    LOG.debug("Supports result set type? " + boolrc);
+        boolrc = md.supportsColumnAliasing();
+        LOG.debug("Supports column aliasing? " + boolrc);
 
-    boolrc = md.supportsSavepoints();
-    LOG.debug("Supports savepoints? " + boolrc);
+        boolrc = md.supportsFullOuterJoins();
+        LOG.debug("Supports full outer joins? " + boolrc);
 
-    boolrc = md.supportsSchemasInDataManipulation();
-    LOG.debug("Supports schemas in data manipulation? " + boolrc);
+        boolrc = md.supportsGroupBy();
+        LOG.debug("Supports group by? " + boolrc);
 
-    boolrc = md.supportsSchemasInIndexDefinitions();
-    LOG.debug("Supports schemas in index definitions? " + boolrc);
+        boolrc = md.supportsLimitedOuterJoins();
+        LOG.debug("Supports limited outer joins? " + boolrc);
 
-    boolrc = md.supportsSchemasInPrivilegeDefinitions();
-    LOG.debug("Supports schemas in privilege definitions? " + boolrc);
+        boolrc = md.supportsMultipleResultSets();
+        LOG.debug("Supports limited outer joins? " + boolrc);
 
-    boolrc = md.supportsSchemasInProcedureCalls();
-    LOG.debug("Supports schemas in procedure calls? " + boolrc);
+        boolrc = md.supportsNonNullableColumns();
+        LOG.debug("Supports non-nullable columns? " + boolrc);
 
-    boolrc = md.supportsSchemasInTableDefinitions();
-    LOG.debug("Supports schemas in table definitions? " + boolrc);
+        boolrc = md.supportsOuterJoins();
+        LOG.debug("Supports outer joins? " + boolrc);
 
-    boolrc = md.supportsSelectForUpdate();
-    LOG.debug("Supports select for update? " + boolrc);
+        boolrc = md.supportsPositionedDelete();
+        LOG.debug("Supports positioned delete? " + boolrc);
 
-    boolrc = md.supportsStoredProcedures();
-    LOG.debug("Supports stored procedures? " + boolrc);
+        boolrc = md.supportsPositionedUpdate();
+        LOG.debug("Supports positioned update? " + boolrc);
 
-    boolrc = md.supportsTransactions();
-    LOG.debug("Supports transactions? " + boolrc);
+        boolrc = md.supportsResultSetHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
+        LOG.debug("Supports result set holdability? " + boolrc);
 
-    boolrc = md.supportsUnion();
-    LOG.debug("Supports union? " + boolrc);
+        boolrc = md.supportsResultSetType(ResultSet.HOLD_CURSORS_OVER_COMMIT);
+        LOG.debug("Supports result set type? " + boolrc);
 
-    boolrc = md.supportsUnionAll();
-    LOG.debug("Supports union all? " + boolrc);
+        boolrc = md.supportsSavepoints();
+        LOG.debug("Supports savepoints? " + boolrc);
 
-  }
+        boolrc = md.supportsSchemasInDataManipulation();
+        LOG.debug("Supports schemas in data manipulation? " + boolrc);
 
-  @Test
-  public void setSchema() throws SQLException {
-    try (Statement stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
-        ResultSet.CONCUR_READ_ONLY)) {
+        boolrc = md.supportsSchemasInIndexDefinitions();
+        LOG.debug("Supports schemas in index definitions? " + boolrc);
 
-      final String dbName = "bigtop_jdbc_test_db";
+        boolrc = md.supportsSchemasInPrivilegeDefinitions();
+        LOG.debug("Supports schemas in privilege definitions? " + boolrc);
 
-      final String tableName = "bigtop_jdbc_test_table";
-      stmt.execute("drop table if exists " + tableName);
+        boolrc = md.supportsSchemasInProcedureCalls();
+        LOG.debug("Supports schemas in procedure calls? " + boolrc);
 
-      stmt.execute("drop database if exists " + dbName + " cascade");
-      stmt.execute("create database " + dbName);
+        boolrc = md.supportsSchemasInTableDefinitions();
+        LOG.debug("Supports schemas in table definitions? " + boolrc);
 
-      conn.setSchema(dbName);
+        boolrc = md.supportsSelectForUpdate();
+        LOG.debug("Supports select for update? " + boolrc);
 
-      DatabaseMetaData md = conn.getMetaData();
+        boolrc = md.supportsStoredProcedures();
+        LOG.debug("Supports stored procedures? " + boolrc);
 
-      ResultSet rs = md.getSchemas(null, dbName);
+        boolrc = md.supportsTransactions();
+        LOG.debug("Supports transactions? " + boolrc);
 
-      while (rs.next()) {
-        String schemaName = rs.getString(2);
-        LOG.debug("Schema name is " + schemaName);
-      }
+        boolrc = md.supportsUnion();
+        LOG.debug("Supports union? " + boolrc);
 
-      stmt.execute("create table " + tableName + " (i int, s varchar(32))");
+        boolrc = md.supportsUnionAll();
+        LOG.debug("Supports union all? " + boolrc);
 
-      rs = md.getTables(null, dbName, tableName, null);
-      while (rs.next()) {
-        String tName = rs.getString(3);
-        LOG.debug("Schema name is " + tName);
-      }
-
-      rs = md.getColumns(null, dbName, tableName, "i");
-      while (rs.next()) {
-        String colName = rs.getString(4);
-        LOG.debug("Schema name is " + colName);
-      }
-
-      rs = md.getFunctions(null, dbName, "foo");
-      while (rs.next()) {
-        String funcName = rs.getString(3);
-        LOG.debug("Schema name is " + funcName);
-      }
-    }
-  }
-
-  @Test
-  public void statement() throws SQLException {
-    try (Statement stmt = conn.createStatement()) {
-      stmt.cancel();
     }
 
-    try (Statement stmt = conn.createStatement()) {
-      stmt.clearWarnings();
-
-      final String tableName = "bigtop_jdbc_statement_test_table";
-
-      stmt.execute("drop table if exists " + tableName);
-      stmt.execute("create table " + tableName + " (a int, b varchar(32))");
-
-      stmt.executeUpdate("insert into " + tableName + " values (1, 'abc'), (2, 'def')");
-
-      int intrc = stmt.getUpdateCount();
-      LOG.debug("Update count is " + intrc);
-
-      ResultSet rs = stmt.executeQuery("select * from " + tableName);
-      while (rs.next()) {
-        LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2));
-      }
-
-      Connection localConn = stmt.getConnection();
-
-      intrc = stmt.getFetchDirection();
-      LOG.debug("Fetch direction is " + intrc);
+    @Test
+    public void setSchema() throws SQLException {
+        try (Statement stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
+                ResultSet.CONCUR_READ_ONLY)) {
 
-      intrc = stmt.getFetchSize();
-      LOG.debug("Fetch size is " + intrc);
+            final String dbName = "bigtop_jdbc_test_db";
 
-      intrc = stmt.getMaxRows();
-      LOG.debug("max rows is " + intrc);
+            final String tableName = "bigtop_jdbc_test_table";
+            stmt.execute("drop table if exists " + tableName);
 
-      boolean boolrc = stmt.getMoreResults();
-      LOG.debug("more results is " + boolrc);
+            stmt.execute("drop database if exists " + dbName + " cascade");
+            stmt.execute("create database " + dbName);
 
-      intrc = stmt.getQueryTimeout();
-      LOG.debug("query timeout is " + intrc);
+            conn.setSchema(dbName);
 
-      stmt.execute("select * from " + tableName);
-      rs = stmt.getResultSet();
-      while (rs.next()) {
-        LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2));
-      }
+            DatabaseMetaData md = conn.getMetaData();
 
-      intrc = stmt.getResultSetType();
-      LOG.debug("result set type is " + intrc);
+            ResultSet rs = md.getSchemas(null, dbName);
 
-      SQLWarning warning = stmt.getWarnings();
-      while (warning != null) {
-        LOG.debug("Found a warning: " + warning.getMessage());
-        warning = warning.getNextWarning();
-      }
+            while (rs.next()) {
+                String schemaName = rs.getString(2);
+                LOG.debug("Schema name is " + schemaName);
+            }
 
-      boolrc = stmt.isClosed();
-      LOG.debug("is closed " + boolrc);
+            stmt.execute("create table " + tableName + " (i int, s varchar(32))");
 
-      boolrc = stmt.isCloseOnCompletion();
-      LOG.debug("is close on completion " + boolrc);
+            rs = md.getTables(null, dbName, tableName, null);
+            while (rs.next()) {
+                String tName = rs.getString(3);
+                LOG.debug("Schema name is " + tName);
+            }
 
-      boolrc = stmt.isPoolable();
-      LOG.debug("is poolable " + boolrc);
-
-      stmt.setFetchDirection(ResultSet.FETCH_FORWARD);
-      stmt.setFetchSize(500);
-      stmt.setMaxRows(500);
-    }
-  }
-
-  @Test
-  public void preparedStmtAndResultSet() throws SQLException {
-    final String tableName = "bigtop_jdbc_psars_test_table";
-    try (Statement stmt = conn.createStatement()) {
-      stmt.execute("drop table if exists " + tableName);
-      stmt.execute("create table " + tableName + " (bo boolean, ti tinyint, db double, fl float, " +
-          "i int, lo bigint, sh smallint, st varchar(32))");
-    }
-
-    // NOTE Hive 1.2 theoretically support binary, Date & Timestamp in JDBC, but I get errors when I
-    // try to put them in the query.
-    try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName +
-        " values (?, ?, ?, ?, ?, ?, ?, ?)")) {
-      ps.setBoolean(1, true);
-      ps.setByte(2, (byte)1);
-      ps.setDouble(3, 3.141592654);
-      ps.setFloat(4, 3.14f);
-      ps.setInt(5, 3);
-      ps.setLong(6, 10L);
-      ps.setShort(7, (short)20);
-      ps.setString(8, "abc");
-      ps.executeUpdate();
-    }
-
-    try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName + " (i, st) " +
-        "values(?, ?)", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) {
-      ps.setNull(1, Types.INTEGER);
-      ps.setObject(2, "mary had a little lamb");
-      ps.executeUpdate();
-      ps.setNull(1, Types.INTEGER, null);
-      ps.setString(2, "its fleece was white as snow");
-      ps.clearParameters();
-      ps.setNull(1, Types.INTEGER, null);
-      ps.setString(2, "its fleece was white as snow");
-      ps.execute();
+            rs = md.getColumns(null, dbName, tableName, "i");
+            while (rs.next()) {
+                String colName = rs.getString(4);
+                LOG.debug("Schema name is " + colName);
+            }
 
+            rs = md.getFunctions(null, dbName, "foo");
+            while (rs.next()) {
+                String funcName = rs.getString(3);
+                LOG.debug("Schema name is " + funcName);
+            }
+        }
     }
 
-    try (Statement stmt = conn.createStatement()) {
+    @Test
+    public void statement() throws SQLException {
+        try (Statement stmt = conn.createStatement()) {
+            stmt.cancel();
+        }
 
-      ResultSet rs = stmt.executeQuery("select * from " + tableName);
+        try (Statement stmt = conn.createStatement()) {
+            stmt.clearWarnings();
 
-      ResultSetMetaData md = rs.getMetaData();
+            final String tableName = "bigtop_jdbc_statement_test_table";
 
-      int colCnt = md.getColumnCount();
-      LOG.debug("Column count is " + colCnt);
+            stmt.execute("drop table if exists " + tableName);
+            stmt.execute("create table " + tableName + " (a int, b varchar(32))");
 
-      for (int i = 1; i <= colCnt; i++) {
-        LOG.debug("Looking at column " + i);
-        String strrc = md.getColumnClassName(i);
-        LOG.debug("Column class name is " + strrc);
+            stmt.executeUpdate("insert into " + tableName + " values (1, 'abc'), (2, 'def')");
 
-        int intrc = md.getColumnDisplaySize(i);
-        LOG.debug("Column display size is " + intrc);
+            int intrc = stmt.getUpdateCount();
+            LOG.debug("Update count is " + intrc);
 
-        strrc = md.getColumnLabel(i);
-        LOG.debug("Column label is " + strrc);
+            ResultSet rs = stmt.executeQuery("select * from " + tableName);
+            while (rs.next()) {
+                LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2));
+            }
 
-        strrc = md.getColumnName(i);
-        LOG.debug("Column name is " + strrc);
+            Connection localConn = stmt.getConnection();
 
-        intrc = md.getColumnType(i);
-        LOG.debug("Column type is " + intrc);
+            intrc = stmt.getFetchDirection();
+            LOG.debug("Fetch direction is " + intrc);
 
-        strrc = md.getColumnTypeName(i);
-        LOG.debug("Column type name is " + strrc);
+            intrc = stmt.getFetchSize();
+            LOG.debug("Fetch size is " + intrc);
 
-        intrc = md.getPrecision(i);
-        LOG.debug("Precision is " + intrc);
+            intrc = stmt.getMaxRows();
+            LOG.debug("max rows is " + intrc);
 
-        intrc = md.getScale(i);
-        LOG.debug("Scale is " + intrc);
+            boolean boolrc = stmt.getMoreResults();
+            LOG.debug("more results is " + boolrc);
 
-        boolean boolrc = md.isAutoIncrement(i);
-        LOG.debug("Is auto increment? " + boolrc);
+            intrc = stmt.getQueryTimeout();
+            LOG.debug("query timeout is " + intrc);
 
-        boolrc = md.isCaseSensitive(i);
-        LOG.debug("Is case sensitive? " + boolrc);
+            stmt.execute("select * from " + tableName);
+            rs = stmt.getResultSet();
+            while (rs.next()) {
+                LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2));
+            }
 
-        boolrc = md.isCurrency(i);
-        LOG.debug("Is currency? " + boolrc);
+            intrc = stmt.getResultSetType();
+            LOG.debug("result set type is " + intrc);
 
-        intrc = md.getScale(i);
-        LOG.debug("Scale is " + intrc);
+            SQLWarning warning = stmt.getWarnings();
+            while (warning != null) {
+                LOG.debug("Found a warning: " + warning.getMessage());
+                warning = warning.getNextWarning();
+            }
 
-        intrc = md.isNullable(i);
-        LOG.debug("Is nullable? " + intrc);
+            boolrc = stmt.isClosed();
+            LOG.debug("is closed " + boolrc);
 
-        boolrc = md.isReadOnly(i);
-        LOG.debug("Is read only? " + boolrc);
+            boolrc = stmt.isCloseOnCompletion();
+            LOG.debug("is close on completion " + boolrc);
 
-      }
+            boolrc = stmt.isPoolable();
+            LOG.debug("is poolable " + boolrc);
 
-      while (rs.next()) {
-        LOG.debug("bo = " + rs.getBoolean(1));
-        LOG.debug("bo = " + rs.getBoolean("bo"));
-        LOG.debug("ti = " + rs.getByte(2));
-        LOG.debug("ti = " + rs.getByte("ti"));
-        LOG.debug("db = " + rs.getDouble(3));
-        LOG.debug("db = " + rs.getDouble("db"));
-        LOG.debug("fl = " + rs.getFloat(4));
-        LOG.debug("fl = " + rs.getFloat("fl"));
-        LOG.debug("i = " + rs.getInt(5));
-        LOG.debug("i = " + rs.getInt("i"));
-        LOG.debug("lo = " + rs.getLong(6));
-        LOG.debug("lo = " + rs.getLong("lo"));
-        LOG.debug("sh = " + rs.getShort(7));
-        LOG.debug("sh = " + rs.getShort("sh"));
-        LOG.debug("st = " + rs.getString(8));
-        LOG.debug("st = " + rs.getString("st"));
-        LOG.debug("tm = " + rs.getObject(8));
-        LOG.debug("tm = " + rs.getObject("st"));
-        LOG.debug("tm was null " + rs.wasNull());
-      }
-      LOG.debug("bo is column " + rs.findColumn("bo"));
-
-      int intrc = rs.getConcurrency();
-      LOG.debug("concurrency " + intrc);
-
-      intrc = rs.getFetchDirection();
-      LOG.debug("fetch direction " + intrc);
-
-      intrc = rs.getType();
-      LOG.debug("type " + intrc);
-
-      Statement copy = rs.getStatement();
+            stmt.setFetchDirection(ResultSet.FETCH_FORWARD);
+            stmt.setFetchSize(500);
+            stmt.setMaxRows(500);
+        }
+    }
 
-      SQLWarning warning = rs.getWarnings();
-      while (warning != null) {
-        LOG.debug("Found a warning: " + warning.getMessage());
-        warning = warning.getNextWarning();
-      }
-      rs.clearWarnings();
+    @Test
+    public void preparedStmtAndResultSet() throws SQLException {
+        final String tableName = "bigtop_jdbc_psars_test_table";
+        try (Statement stmt = conn.createStatement()) {
+            stmt.execute("drop table if exists " + tableName);
+            stmt.execute("create table " + tableName + " (bo boolean, ti tinyint, db double, fl float, " +
+                    "i int, lo bigint, sh smallint, st varchar(32))");
+        }
+
+        // NOTE Hive 1.2 theoretically supports binary, Date & Timestamp in JDBC, but I get errors when I
+        // try to put them in the query.
+        try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName +
+                " values (?, ?, ?, ?, ?, ?, ?, ?)")) {
+            ps.setBoolean(1, true);
+            ps.setByte(2, (byte) 1);
+            ps.setDouble(3, 3.141592654);
+            ps.setFloat(4, 3.14f);
+            ps.setInt(5, 3);
+            ps.setLong(6, 10L);
+            ps.setShort(7, (short) 20);
+            ps.setString(8, "abc");
+            ps.executeUpdate();
+        }
+
+        try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName + " (i, st) " +
+                "values(?, ?)", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) {
+            ps.setNull(1, Types.INTEGER);
+            ps.setObject(2, "mary had a little lamb");
+            ps.executeUpdate();
+            ps.setNull(1, Types.INTEGER, null);
+            ps.setString(2, "its fleece was white as snow");
+            ps.clearParameters();
+            ps.setNull(1, Types.INTEGER, null);
+            ps.setString(2, "its fleece was white as snow");
+            ps.execute();
+
+        }
+
+        try (Statement stmt = conn.createStatement()) {
+
+            ResultSet rs = stmt.executeQuery("select * from " + tableName);
+
+            ResultSetMetaData md = rs.getMetaData();
+
+            int colCnt = md.getColumnCount();
+            LOG.debug("Column count is " + colCnt);
+
+            for (int i = 1; i <= colCnt; i++) {
+                LOG.debug("Looking at column " + i);
+                String strrc = md.getColumnClassName(i);
+                LOG.debug("Column class name is " + strrc);
+
+                int intrc = md.getColumnDisplaySize(i);
+                LOG.debug("Column display size is " + intrc);
+
+                strrc = md.getColumnLabel(i);
+                LOG.debug("Column label is " + strrc);
+
+                strrc = md.getColumnName(i);
+                LOG.debug("Column name is " + strrc);
+
+                intrc = md.getColumnType(i);
+                LOG.debug("Column type is " + intrc);
+
+                strrc = md.getColumnTypeName(i);
+                LOG.debug("Column type name is " + strrc);
+
+                intrc = md.getPrecision(i);
+                LOG.debug("Precision is " + intrc);
+
+                intrc = md.getScale(i);
+                LOG.debug("Scale is " + intrc);
+
+                boolean boolrc = md.isAutoIncrement(i);
+                LOG.debug("Is auto increment? " + boolrc);
+
+                boolrc = md.isCaseSensitive(i);
+                LOG.debug("Is case sensitive? " + boolrc);
+
+                boolrc = md.isCurrency(i);
+                LOG.debug("Is currency? " + boolrc);
+
+                intrc = md.getScale(i);
+                LOG.debug("Scale is " + intrc);
+
+                intrc = md.isNullable(i);
+                LOG.debug("Is nullable? " + intrc);
+
+                boolrc = md.isReadOnly(i);
+                LOG.debug("Is read only? " + boolrc);
+
+            }
+
+            while (rs.next()) {
+                LOG.debug("bo = " + rs.getBoolean(1));
+                LOG.debug("bo = " + rs.getBoolean("bo"));
+                LOG.debug("ti = " + rs.getByte(2));
+                LOG.debug("ti = " + rs.getByte("ti"));
+                LOG.debug("db = " + rs.getDouble(3));
+                LOG.debug("db = " + rs.getDouble("db"));
+                LOG.debug("fl = " + rs.getFloat(4));
+                LOG.debug("fl = " + rs.getFloat("fl"));
+                LOG.debug("i = " + rs.getInt(5));
+                LOG.debug("i = " + rs.getInt("i"));
+                LOG.debug("lo = " + rs.getLong(6));
+                LOG.debug("lo = " + rs.getLong("lo"));
+                LOG.debug("sh = " + rs.getShort(7));
+                LOG.debug("sh = " + rs.getShort("sh"));
+                LOG.debug("st = " + rs.getString(8));
+                LOG.debug("st = " + rs.getString("st"));
+                LOG.debug("tm = " + rs.getObject(8));
+                LOG.debug("tm = " + rs.getObject("st"));
+                LOG.debug("tm was null " + rs.wasNull());
+            }
+            LOG.debug("bo is column " + rs.findColumn("bo"));
+
+            int intrc = rs.getConcurrency();
+            LOG.debug("concurrency " + intrc);
+
+            intrc = rs.getFetchDirection();
+            LOG.debug("fetch direction " + intrc);
+
+            intrc = rs.getType();
+            LOG.debug("type " + intrc);
+
+            Statement copy = rs.getStatement();
+
+            SQLWarning warning = rs.getWarnings();
+            while (warning != null) {
+                LOG.debug("Found a warning: " + warning.getMessage());
+                warning = warning.getNextWarning();
+            }
+            rs.clearWarnings();
+        }
     }
-  }
 }


[46/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/5e342c45
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/5e342c45
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/5e342c45

Branch: refs/heads/master
Commit: 5e342c45364ec97f5e3530769a1cc8bdbcf69bb0
Parents: 77e0d6e
Author: Roman Shaposhnik <rv...@apache.org>
Authored: Wed Mar 22 08:51:22 2017 -0700
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:16 2017 -0700

----------------------------------------------------------------------
 bigtop-tests/smoke-tests/odpi-runtime/README.md |  48 ++
 .../smoke-tests/odpi-runtime/build.gradle       |  63 +++
 .../odpi/specs/runtime/hadoop/ApiExaminer.java  | 485 +++++++++++++++++
 .../org/odpi/specs/runtime/hive/HCatalogMR.java | 137 +++++
 .../src/main/resources/api-examiner-prep.sh     |  64 +++
 .../odpi/specs/runtime/TestSpecsRuntime.groovy  | 275 ++++++++++
 .../org/odpi/specs/runtime/hive/HiveHelper.java | 121 ++++
 .../odpi/specs/runtime/hive/JdbcConnector.java  |  79 +++
 .../odpi/specs/runtime/hive/TestBeeline.java    | 201 +++++++
 .../org/odpi/specs/runtime/hive/TestCLI.java    | 213 ++++++++
 .../odpi/specs/runtime/hive/TestHCatalog.java   | 158 ++++++
 .../org/odpi/specs/runtime/hive/TestJdbc.java   | 545 +++++++++++++++++++
 .../org/odpi/specs/runtime/hive/TestSql.java    | 337 ++++++++++++
 .../org/odpi/specs/runtime/hive/TestThrift.java | 251 +++++++++
 .../src/test/python/find-public-apis.py         |  80 +++
 .../hadoop-common-2.7.3-api-report.json         |   1 +
 .../src/test/resources/hadoop-common-bin.list   |   2 +
 .../src/test/resources/hadoop-common-jar.list   |  60 ++
 .../src/test/resources/hadoop-common.list       | 230 ++++++++
 .../resources/hadoop-hdfs-2.7.3-api-report.json |   1 +
 .../src/test/resources/hadoop-hdfs-bin.list     |   1 +
 .../src/test/resources/hadoop-hdfs-jar.list     |  25 +
 .../src/test/resources/hadoop-hdfs.list         |  79 +++
 .../test/resources/hadoop-mapreduce-bin.list    |   1 +
 ...-mapreduce-client-core-2.7.3-api-report.json |   1 +
 .../test/resources/hadoop-mapreduce-jar.list    |  22 +
 .../src/test/resources/hadoop-mapreduce.list    | 123 +++++
 .../src/test/resources/hadoop-subprojs.list     |   4 +
 .../hadoop-yarn-api-2.7.3-api-report.json       |   1 +
 .../src/test/resources/hadoop-yarn-bin.list     |   3 +
 .../hadoop-yarn-client-2.7.3-api-report.json    |   1 +
 .../hadoop-yarn-common-2.7.3-api-report.json    |   1 +
 .../src/test/resources/hadoop-yarn-jar.list     |  38 ++
 .../src/test/resources/hadoop-yarn.list         |  74 +++
 .../test/resources/testRuntimeSpecConf.groovy   | 430 +++++++++++++++
 bigtop-tests/spec-tests/README.md               |  48 --
 bigtop-tests/spec-tests/build.gradle            |  63 ---
 bigtop-tests/spec-tests/runtime/build.gradle    |  63 ---
 .../odpi/specs/runtime/hadoop/ApiExaminer.java  | 485 -----------------
 .../org/odpi/specs/runtime/hive/HCatalogMR.java | 137 -----
 .../src/main/resources/api-examiner-prep.sh     |  64 ---
 .../odpi/specs/runtime/TestSpecsRuntime.groovy  | 275 ----------
 .../org/odpi/specs/runtime/hive/HiveHelper.java | 121 ----
 .../odpi/specs/runtime/hive/JdbcConnector.java  |  79 ---
 .../odpi/specs/runtime/hive/TestBeeline.java    | 201 -------
 .../org/odpi/specs/runtime/hive/TestCLI.java    | 213 --------
 .../odpi/specs/runtime/hive/TestHCatalog.java   | 158 ------
 .../org/odpi/specs/runtime/hive/TestJdbc.java   | 545 -------------------
 .../org/odpi/specs/runtime/hive/TestSql.java    | 337 ------------
 .../org/odpi/specs/runtime/hive/TestThrift.java | 251 ---------
 .../runtime/src/test/python/find-public-apis.py |  80 ---
 .../hadoop-common-2.7.3-api-report.json         |   1 -
 .../src/test/resources/hadoop-common-bin.list   |   2 -
 .../src/test/resources/hadoop-common-jar.list   |  60 --
 .../src/test/resources/hadoop-common.list       | 230 --------
 .../resources/hadoop-hdfs-2.7.3-api-report.json |   1 -
 .../src/test/resources/hadoop-hdfs-bin.list     |   1 -
 .../src/test/resources/hadoop-hdfs-jar.list     |  25 -
 .../runtime/src/test/resources/hadoop-hdfs.list |  79 ---
 .../test/resources/hadoop-mapreduce-bin.list    |   1 -
 ...-mapreduce-client-core-2.7.3-api-report.json |   1 -
 .../test/resources/hadoop-mapreduce-jar.list    |  22 -
 .../src/test/resources/hadoop-mapreduce.list    | 123 -----
 .../src/test/resources/hadoop-subprojs.list     |   4 -
 .../hadoop-yarn-api-2.7.3-api-report.json       |   1 -
 .../src/test/resources/hadoop-yarn-bin.list     |   3 -
 .../hadoop-yarn-client-2.7.3-api-report.json    |   1 -
 .../hadoop-yarn-common-2.7.3-api-report.json    |   1 -
 .../src/test/resources/hadoop-yarn-jar.list     |  38 --
 .../runtime/src/test/resources/hadoop-yarn.list |  74 ---
 .../test/resources/testRuntimeSpecConf.groovy   | 430 ---------------
 build.gradle                                    |   3 +-
 settings.gradle                                 |   5 -
 73 files changed, 4157 insertions(+), 4224 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/README.md
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/README.md b/bigtop-tests/smoke-tests/odpi-runtime/README.md
new file mode 100644
index 0000000..8fde997
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/README.md
@@ -0,0 +1,48 @@
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Test suite to validate Hadoop basic specifications
+==================================================
+
+The test suite is intended to be used as a validation tool to make sure that a
+Hadoop stack derived from Apache Bigtop remains compliant with the
+specification. At a minimum this means guaranteeing compatibility of the
+environment, binary layouts, certain configuration parameters, and so on.
+
+The validation test suite for the specs is loosely based on Apache Bigtop iTest
+and consists of two essential parts: a configuration file describing the
+commands to run and their expected outcomes, and a test driver that runs the
+commands and compares the results.
+ 
+Running the tests
+=================
+
+Tests can be executed by running the following command:
+```
+  gradle :bigtop-tests:spec-tests:runtime:test -Pspec.tests --info
+```
+

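For readers new to the DSL mentioned in the README above: the configuration file is the testRuntimeSpecConf.groovy resource read by TestSpecsRuntime.groovy later in this commit. A minimal sketch of a 'shell'-type entry is shown below; the test name, command, and message are placeholders for illustration, not entries copied from the real config.

```
specs {
  tests {
    'EXAMPLE_ENV_CHECK' {                          // hypothetical test name
      name = 'EXAMPLE_ENV_CHECK'
      type = 'shell'                               // dispatched by the 'shell' case in TestSpecsRuntime
      arguments {
        command = 'hadoop version'                 // placeholder command to run
        expectedResult = 0                         // expected exit code (0 is the default)
        message = 'hadoop CLI is not available'    // text included in the failure message
      }
    }
  }
}
```

Entries of the other types handled by the driver ('envdir', 'dirstruct', 'dircontent', 'hadoop_tools', 'api_examination') carry different keys in their arguments block, as can be seen in the switch statement of TestSpecsRuntime.groovy.
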
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/build.gradle
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/build.gradle b/bigtop-tests/smoke-tests/odpi-runtime/build.gradle
new file mode 100644
index 0000000..97e3635
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/build.gradle
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+def junitVersion = '4.11'
+
+apply plugin: 'java'
+
+repositories {
+  maven {
+    url "http://conjars.org/repo/"
+  }
+}
+dependencies {
+  compile group: 'junit', name: 'junit', version: junitVersion, transitive: 'true'
+  compile group: 'commons-logging', name: 'commons-logging', version: '1.1.3'
+  compile group: 'org.apache.commons', name: 'commons-exec', version: '1.3'
+  compile group: 'org.apache.hive', name: 'hive-jdbc', version: '1.2.1'
+  compile group: 'org.apache.hive', name: 'hive-metastore', version: '1.2.1'
+  compile group: 'org.apache.hive', name: 'hive-common', version: '1.2.1'
+  compile group: 'org.apache.thrift', name: 'libfb303', version: '0.9.3'
+  compile group: 'org.apache.thrift', name: 'libthrift', version: '0.9.3'
+  compile group: 'org.apache.hadoop', name: 'hadoop-common', version: '2.7.2'
+  compile group: 'org.apache.hive.hcatalog', name: 'hive-hcatalog-core', version: '1.2.1'
+  testCompile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-core', version: '2.7.2'
+  compile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-jobclient', version: '2.7.2'
+  testCompile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-common', version: '2.7.2'
+  testCompile group: 'org.apache.hadoop', name: 'hadoop-hdfs', version: '2.7.2'
+  testCompile group: 'org.apache.hive', name: 'hive-exec', version: '1.2.1'
+  testCompile "junit:junit:4.11"
+  if (System.env.HADOOP_CONF_DIR) testRuntime files(System.env.HADOOP_CONF_DIR)
+}
+
+jar {
+    from {
+        (configurations.runtime).grep{it.toString() =~ /(hive|libfb303)-.*[jw]ar$/}.collect {
+              zipTree(it)
+        }
+    }
+
+    exclude 'META-INF/*.RSA', 'META-INF/*.SF','META-INF/*.DSA'
+}
+
+test {
+  // Change the default location where test data is picked up
+  systemProperty 'test.resources.dir', "${buildDir}/resources/test/"
+  systemProperty 'odpi.test.hive.hcat.job.jar', jar.archivePath
+  systemProperty 'odpi.test.hive.hcat.core.jar', (configurations.runtime).find { it.toString() =~ /hive-hcatalog-core-.*jar$/ }
+}
+test.dependsOn jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
new file mode 100644
index 0000000..d95c010
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
@@ -0,0 +1,485 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hadoop;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Options;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.codehaus.jackson.annotate.JsonIgnore;
+import org.codehaus.jackson.map.ObjectMapper;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * A tool that generates API conformance tests for Hadoop libraries
+ */
+public class ApiExaminer {
+
+  private static final Log LOG = LogFactory.getLog(ApiExaminer.class.getName());
+
+  static private Set<String> unloadableClasses;
+
+  private List<String> errors;
+  private List<String> warnings;
+
+  static {
+    unloadableClasses = new HashSet<>();
+    unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsMapping");
+    unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping");
+    unloadableClasses.add("org.apache.hadoop.io.compress.lz4.Lz4Compressor");
+    unloadableClasses.add("org.apache.hadoop.record.compiler.ant.RccTask");
+
+  }
+
+  public static void main(String[] args) {
+    Options options = new Options();
+
+    options.addOption("c", "compare", true,
+        "Compare against a spec, argument is the json file containing spec");
+    options.addOption("h", "help", false, "You're looking at it");
+    options.addOption("j", "jar", true, "Jar to examine");
+    options.addOption("p", "prepare-spec", true,
+        "Prepare the spec, argument is the directory to write the spec to");
+
+    try {
+      CommandLine cli = new GnuParser().parse(options, args);
+
+      if (cli.hasOption('h')) {
+        usage(options);
+        return;
+      }
+
+      if ((!cli.hasOption('c') && !cli.hasOption('p')) ||
+          (cli.hasOption('c') && cli.hasOption('p'))) {
+        System.err.println("You must choose either -c or -p");
+        usage(options);
+        return;
+      }
+
+      if (!cli.hasOption('j')) {
+        System.err.println("You must specify the jar to prepare or compare");
+        usage(options);
+        return;
+      }
+
+      String jar = cli.getOptionValue('j');
+      ApiExaminer examiner = new ApiExaminer();
+
+      if (cli.hasOption('c')) {
+        examiner.compareAgainstStandard(cli.getOptionValue('c'), jar);
+      } else if (cli.hasOption('p')) {
+        examiner.prepareExpected(jar, cli.getOptionValue('p'));
+      }
+    } catch (Exception e) {
+      System.err.println("Received exception while processing");
+      e.printStackTrace();
+    }
+  }
+
+  private static void usage(Options options) {
+    HelpFormatter help = new HelpFormatter();
+    help.printHelp("api-examiner", options);
+
+  }
+
+  private ApiExaminer() {
+  }
+
+  private void prepareExpected(String jarFile, String outputDir) throws IOException,
+      ClassNotFoundException {
+    JarInfo jarInfo = new JarInfo(jarFile, this);
+    jarInfo.dumpToFile(new File(outputDir));
+  }
+
+  private void compareAgainstStandard(String json, String jarFile) throws IOException,
+      ClassNotFoundException {
+    errors = new ArrayList<>();
+    warnings = new ArrayList<>();
+    JarInfo underTest = new JarInfo(jarFile, this);
+    JarInfo standard = jarInfoFromFile(new File(json));
+    standard.compareAndReport(underTest);
+
+    if (errors.size() > 0) {
+      System.err.println("Found " + errors.size() + " incompatibilities:");
+      for (String error : errors) {
+        System.err.println(error);
+      }
+    }
+
+    if (warnings.size() > 0) {
+      System.err.println("Found " + warnings.size() + " possible issues: ");
+      for (String warning : warnings) {
+        System.err.println(warning);
+      }
+    }
+
+
+  }
+
+  private JarInfo jarInfoFromFile(File inputFile) throws IOException {
+    ObjectMapper mapper = new ObjectMapper();
+    JarInfo jarInfo = mapper.readValue(inputFile, JarInfo.class);
+    jarInfo.patchUpClassBackPointers(this);
+    return jarInfo;
+  }
+
+  private static class JarInfo {
+    String name;
+    String version;
+    ApiExaminer container;
+    Map<String, ClassInfo> classes;
+
+    // For use by Jackson
+    public JarInfo() {
+
+    }
+
+    JarInfo(String jarFile, ApiExaminer container) throws IOException, ClassNotFoundException {
+      this.container = container;
+      LOG.info("Processing jar " + jarFile);
+      File f = new File(jarFile);
+      Pattern pattern = Pattern.compile("(hadoop-[a-z\\-]+)-([0-9]\\.[0-9]\\.[0-9]).*");
+      Matcher matcher = pattern.matcher(f.getName());
+      if (!matcher.matches()) {
+        String msg = "Unable to determine name and version from " + f.getName();
+        LOG.error(msg);
+        throw new RuntimeException(msg);
+      }
+      name = matcher.group(1);
+      version = matcher.group(2);
+      classes = new HashMap<>();
+
+      JarFile jar = new JarFile(jarFile);
+      Enumeration<JarEntry> entries = jar.entries();
+      while (entries.hasMoreElements()) {
+        String name = entries.nextElement().getName();
+        if (name.endsWith(".class")) {
+          name = name.substring(0, name.length() - 6);
+          name = name.replace('/', '.');
+          if (!unloadableClasses.contains(name)) {
+            LOG.debug("Processing class " + name);
+            Class<?> clazz = Class.forName(name);
+            if (clazz.getAnnotation(InterfaceAudience.Public.class) != null &&
+                clazz.getAnnotation(InterfaceStability.Stable.class) != null) {
+              classes.put(name, new ClassInfo(this, clazz));
+            }
+          }
+        }
+      }
+    }
+
+    public String getName() {
+      return name;
+    }
+
+    public void setName(String name) {
+      this.name = name;
+    }
+
+    public String getVersion() {
+      return version;
+    }
+
+    public void setVersion(String version) {
+      this.version = version;
+    }
+
+    public Map<String, ClassInfo> getClasses() {
+      return classes;
+    }
+
+    public void setClasses(Map<String, ClassInfo> classes) {
+      this.classes = classes;
+    }
+
+    void compareAndReport(JarInfo underTest) {
+      Set<ClassInfo> underTestClasses = new HashSet<>(underTest.classes.values());
+      for (ClassInfo classInfo : classes.values()) {
+        if (underTestClasses.contains(classInfo)) {
+          classInfo.compareAndReport(underTest.classes.get(classInfo.name));
+          underTestClasses.remove(classInfo);
+        } else {
+          container.errors.add(underTest + " does not contain class " + classInfo);
+        }
+      }
+
+      if (underTestClasses.size() > 0) {
+        for (ClassInfo extra : underTestClasses) {
+          container.warnings.add(underTest + " contains extra class " + extra);
+        }
+      }
+    }
+
+    void dumpToFile(File outputDir) throws IOException {
+      File output = new File(outputDir, name + "-" + version + "-api-report.json");
+      ObjectMapper mapper = new ObjectMapper();
+      mapper.writeValue(output, this);
+    }
+
+    void patchUpClassBackPointers(ApiExaminer container) {
+      this.container = container;
+      for (ClassInfo classInfo : classes.values()) {
+        classInfo.setJar(this);
+        classInfo.patchUpBackMethodBackPointers();
+      }
+    }
+
+    @Override
+    public boolean equals(Object other) {
+      if (!(other instanceof JarInfo)) return false;
+      JarInfo that = (JarInfo)other;
+      return name.equals(that.name) && version.equals(that.version);
+    }
+
+    @Override
+    public String toString() {
+      return name + "-" + version;
+    }
+  }
+
+  private static class ClassInfo {
+    @JsonIgnore JarInfo jar;
+    String name;
+    Map<String, MethodInfo> methods;
+
+    // For use by Jackson
+    public ClassInfo() {
+
+    }
+
+    ClassInfo(JarInfo jar, Class<?> clazz) {
+      this.jar = jar;
+      this.name = clazz.getName();
+      methods = new HashMap<>();
+
+      for (Method method : clazz.getMethods()) {
+        if (method.getDeclaringClass().equals(clazz)) {
+          LOG.debug("Processing method " + method.getName());
+          MethodInfo mi = new MethodInfo(this, method);
+          methods.put(mi.toString(), mi);
+        }
+      }
+    }
+
+    public JarInfo getJar() {
+      return jar;
+    }
+
+    public void setJar(JarInfo jar) {
+      this.jar = jar;
+    }
+
+    public String getName() {
+      return name;
+    }
+
+    public void setName(String name) {
+      this.name = name;
+    }
+
+    public Map<String, MethodInfo> getMethods() {
+      return methods;
+    }
+
+    public void setMethods(Map<String, MethodInfo> methods) {
+      this.methods = methods;
+    }
+
+    void compareAndReport(ClassInfo underTest) {
+      // Make a copy so we can remove them as we match them, making it easy to find additional ones
+      Set<MethodInfo> underTestMethods = new HashSet<>(underTest.methods.values());
+      for (MethodInfo methodInfo : methods.values()) {
+        if (underTestMethods.contains(methodInfo)) {
+          methodInfo.compareAndReport(underTest.methods.get(methodInfo.toString()));
+          underTestMethods.remove(methodInfo);
+        } else {
+          jar.container.errors.add(underTest + " does not contain method " + methodInfo);
+        }
+      }
+
+      if (underTestMethods.size() > 0) {
+        for (MethodInfo extra : underTestMethods) {
+          jar.container.warnings.add(underTest + " contains extra method " + extra);
+        }
+      }
+    }
+
+    void patchUpBackMethodBackPointers() {
+      for (MethodInfo methodInfo : methods.values()) methodInfo.setContainingClass(this);
+    }
+
+    @Override
+    public boolean equals(Object other) {
+      if (!(other instanceof ClassInfo)) return false;
+      ClassInfo that = (ClassInfo)other;
+      return name.equals(that.name);  // Classes can be compared just on names
+    }
+
+    @Override
+    public int hashCode() {
+      return name.hashCode();
+    }
+
+    @Override
+    public String toString() {
+      return jar + " " + name;
+    }
+  }
+
+  private static class MethodInfo {
+    @JsonIgnore ClassInfo containingClass;
+    String name;
+    String returnType;
+    List<String> args;
+    Set<String> exceptions;
+
+    // For use by Jackson
+    public MethodInfo() {
+
+    }
+
+    MethodInfo(ClassInfo containingClass, Method method) {
+      this.containingClass = containingClass;
+      this.name = method.getName();
+      args = new ArrayList<>();
+      for (Class<?> argClass : method.getParameterTypes()) {
+        args.add(argClass.getName());
+      }
+      returnType = method.getReturnType().getName();
+      exceptions = new HashSet<>();
+      for (Class<?> exception : method.getExceptionTypes()) {
+        exceptions.add(exception.getName());
+      }
+    }
+
+    public ClassInfo getContainingClass() {
+      return containingClass;
+    }
+
+    public void setContainingClass(ClassInfo containingClass) {
+      this.containingClass = containingClass;
+    }
+
+    public String getName() {
+      return name;
+    }
+
+    public void setName(String name) {
+      this.name = name;
+    }
+
+    public String getReturnType() {
+      return returnType;
+    }
+
+    public void setReturnType(String returnType) {
+      this.returnType = returnType;
+    }
+
+    public List<String> getArgs() {
+      return args;
+    }
+
+    public void setArgs(List<String> args) {
+      this.args = args;
+    }
+
+    public Set<String> getExceptions() {
+      return exceptions;
+    }
+
+    public void setExceptions(Set<String> exceptions) {
+      this.exceptions = exceptions;
+    }
+
+    void compareAndReport(MethodInfo underTest) {
+      // Check to see if they've added or removed exceptions
+      // Make a copy so I can remove them as I check them off and easily find any that have been
+      // added.
+      Set<String> underTestExceptions = new HashSet<>(underTest.exceptions);
+      for (String exception : exceptions) {
+        if (underTest.exceptions.contains(exception)) {
+          underTestExceptions.remove(exception);
+        } else {
+          containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
+              underTest.containingClass + "." + name + " removes exception " + exception);
+        }
+      }
+      if (underTestExceptions.size() > 0) {
+        for (String underTestException : underTestExceptions) {
+          containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
+              underTest.containingClass + "." + name + " adds exception " + underTestException);
+        }
+      }
+    }
+
+    @Override
+    public boolean equals(Object other) {
+      if (!(other instanceof MethodInfo)) return false;
+      MethodInfo that = (MethodInfo)other;
+
+      return containingClass.equals(that.containingClass) && name.equals(that.name) &&
+          returnType.equals(that.returnType) && args.equals(that.args);
+    }
+
+    @Override
+    public int hashCode() {
+      return ((containingClass.hashCode() * 31 + name.hashCode()) * 31 + returnType.hashCode()) * 31 +
+          args.hashCode();
+    }
+
+    @Override
+    public String toString() {
+      StringBuilder buf = new StringBuilder(returnType)
+          .append(" ")
+          .append(name)
+          .append('(');
+      boolean first = true;
+      for (String arg : args) {
+        if (first) first = false;
+        else buf.append(", ");
+        buf.append(arg);
+      }
+      buf.append(")");
+      if (exceptions.size() > 0) {
+        buf.append(" throws ");
+        first = true;
+        for (String exception : exceptions) {
+          if (first) first = false;
+          else buf.append(", ");
+          buf.append(exception);
+        }
+      }
+      return buf.toString();
+    }
+  }
+}

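The hadoop-*-api-report.json resources listed in this commit are single-line JSON blobs, so a rough sketch of their shape may help when reading ApiExaminer: the top level is a JarInfo whose serialized fields are name, version, and a map of ClassInfo entries, each holding a map of MethodInfo entries keyed by the method signature string. The Groovy snippet below builds an illustrative fragment of such a report; the class and method shown are placeholders, not taken from a real report file.

```
import groovy.json.JsonOutput

// Field names mirror the Jackson-serialized getters of JarInfo, ClassInfo and
// MethodInfo above; the content itself is made up for illustration.
def report = [
    name   : 'hadoop-common',
    version: '2.7.3',
    classes: [
        'org.apache.hadoop.conf.Configuration': [
            name   : 'org.apache.hadoop.conf.Configuration',
            methods: [
                'java.lang.String get(java.lang.String)': [
                    name      : 'get',
                    returnType: 'java.lang.String',
                    args      : ['java.lang.String'],
                    exceptions: []
                ]
            ]
        ]
    ]
]
println JsonOutput.prettyPrint(JsonOutput.toJson(report))
```
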
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
new file mode 100644
index 0000000..4110d5d
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
@@ -0,0 +1,137 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hive;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.hive.hcatalog.data.DefaultHCatRecord;
+import org.apache.hive.hcatalog.data.HCatRecord;
+import org.apache.hive.hcatalog.data.schema.HCatSchema;
+import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils;
+import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
+import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
+
+import java.io.IOException;
+import java.net.URI;
+import java.util.StringTokenizer;
+
+public class HCatalogMR extends Configured implements Tool {
+  private final static String INPUT_SCHEMA = "odpi.test.hcat.schema.input";
+  private final static String OUTPUT_SCHEMA = "odpi.test.hcat.schema.output";
+
+  @Override
+  public int run(String[] args) throws Exception {
+    String inputTable = null;
+    String outputTable = null;
+    String inputSchemaStr = null;
+    String outputSchemaStr = null;
+    for(int i = 0; i < args.length; i++){
+        if(args[i].equalsIgnoreCase("-it")){
+            inputTable = args[i+1];
+        }else if(args[i].equalsIgnoreCase("-ot")){
+            outputTable = args[i+1];
+        }else if(args[i].equalsIgnoreCase("-is")){
+            inputSchemaStr = args[i+1];
+        }else if(args[i].equalsIgnoreCase("-os")){
+            outputSchemaStr = args[i+1];
+        }
+    }
+    
+    Configuration conf = getConf();
+    args = new GenericOptionsParser(conf, args).getRemainingArgs();
+
+    conf.set(INPUT_SCHEMA, inputSchemaStr);
+    conf.set(OUTPUT_SCHEMA, outputSchemaStr);
+
+    Job job = new Job(conf, "odpi_hcat_test");
+    HCatInputFormat.setInput(job, "default", inputTable);
+
+    job.setInputFormatClass(HCatInputFormat.class);
+    job.setJarByClass(HCatalogMR.class);
+    job.setMapperClass(Map.class);
+    job.setReducerClass(Reduce.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+    job.setOutputKeyClass(WritableComparable.class);
+    job.setOutputValueClass(HCatRecord.class);
+    HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", outputTable, null));
+    HCatOutputFormat.setSchema(job, HCatSchemaUtils.getHCatSchema(outputSchemaStr));
+    job.setOutputFormatClass(HCatOutputFormat.class);
+
+    return job.waitForCompletion(true) ? 0 : 1;
+
+
+  }
+  public static class Map extends Mapper<WritableComparable,
+          HCatRecord, Text, IntWritable> {
+    private final static IntWritable one = new IntWritable(1);
+    private Text word = new Text();
+    private HCatSchema inputSchema = null;
+
+    @Override
+    protected void map(WritableComparable key, HCatRecord value, Context context)
+        throws IOException, InterruptedException {
+      if (inputSchema == null) {
+        inputSchema =
+            HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(INPUT_SCHEMA));
+      }
+      String line = value.getString("line", inputSchema);
+      StringTokenizer tokenizer = new StringTokenizer(line);
+      while (tokenizer.hasMoreTokens()) {
+        word.set(tokenizer.nextToken());
+        context.write(word, one);
+      }
+    }
+  }
+
+  public static class Reduce extends Reducer<Text, IntWritable, WritableComparable, HCatRecord> {
+    private HCatSchema outputSchema = null;
+
+    @Override
+    protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws
+        IOException, InterruptedException {
+      if (outputSchema == null) {
+        outputSchema =
+            HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(OUTPUT_SCHEMA));
+      }
+      int sum = 0;
+      for (IntWritable i : values) {
+        sum += i.get();
+      }
+      HCatRecord output = new DefaultHCatRecord(2);
+      output.set("word", outputSchema, key);
+      output.set("count", outputSchema, sum);
+      context.write(null, output);
+    }
+  }
+
+  public static void main(String[] args) throws Exception {
+    int exitCode = ToolRunner.run(new HCatalogMR(), args);
+    System.exit(exitCode);
+  }
+ }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh b/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh
new file mode 100755
index 0000000..8c9ab5e
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/resources/api-examiner-prep.sh
@@ -0,0 +1,64 @@
+#!/usr/bin/env bash
+
+############################################################################
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+############################################################################
+
+############################################################################
+# This script is used to generate the hadoop-*-api.report.json files in the
+# test/resources directory.  To use it, you will first need to download an
+# Apache binary distribution of Hadoop and set APACHE_HADOOP_DIR to the
+# directory where you untar that distribution.  You will then need to set
+# BIGTOP_HOME to the directory where your bigtop source is located.  Then
+# run this script for each of the jars you want to generate a report for.
+# The arguments passed to this script should be -p <outputdir> -j <jarfile>
+# where outputdir is the directory you'd like to write the report to and
+# jarfile is the full path of the jar to generate the report for.  Reports
+# should be generated for the following jars: hadoop-common, hadoop-hdfs,
+# hadoop-yarn-common, hadoop-yarn-client, hadoop-yarn-api, and
+# hadoop-mapreduce-client-core
+#
+# Example usage:
+# export APACHE_HADOOP_DIR=/tmp/hadoop-2.7.3
+# export BIGTOP_HOME=/home/me/git/bigtop
+# $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh -j $APACHE_HADOOP_DIR/share/hadoop/common/hadoop-common-2.7.3.jar -p $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/src/test/resources
+#
+# The resulting reports should be committed to git.  This script only needs
+# to be run once per ODPi release.
+############################################################################
+
+
+if [ "x${APACHE_HADOOP_DIR}" = "x" ]
+then
+    echo "You must set APACHE_HADOOP_DIR to the directory you have placed the Apache Hadoop binary distribution in"
+    exit 1
+fi
+
+if [ "x${BIGTOP_HOME}" = "x" ]
+then
+    echo "You must set BIGTOP_HOME to the root directory for your bigtop source"
+    exit 1
+fi
+
+for jar in `find $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/build/libs/ -name \*.jar`
+do
+    CLASSPATH=$CLASSPATH:$jar
+done
+
+for jar in `find $APACHE_HADOOP_DIR -name \*.jar`
+do
+    CLASSPATH=$CLASSPATH:$jar
+done
+
+java -cp $CLASSPATH org.odpi.specs.runtime.hadoop.ApiExaminer $@
+

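To connect the two halves of this workflow: a -p run of the script above writes a <name>-<version>-api-report.json file (see JarInfo.dumpToFile in ApiExaminer.java), and the 'api_examination' test type later replays that report against the jars of the stack under test with -c. A rough Groovy sketch of the round trip follows; the paths and the test jar name are placeholders, and it assumes APACHE_HADOOP_DIR and BIGTOP_HOME are exported as described in the script header.

```
import org.apache.bigtop.itest.shell.Shell

def sh = new Shell()

// 1. Generate the reference report from an Apache Hadoop binary distribution (placeholder paths).
sh.exec("bash api-examiner-prep.sh -p /tmp/reports " +
        "-j /tmp/hadoop-2.7.3/share/hadoop/common/hadoop-common-2.7.3.jar")

// 2. Compare a jar from the stack under test against the generated report,
//    mirroring the 'api_examination' case in TestSpecsRuntime.groovy.
//    "odpi-runtime-tests.jar" is a placeholder for the jar built by this module.
sh.exec("hadoop jar odpi-runtime-tests.jar org.odpi.specs.runtime.hadoop.ApiExaminer " +
        "-c /tmp/reports/hadoop-common-2.7.3-api-report.json " +
        "-j /usr/lib/hadoop/hadoop-common.jar")
println sh.getErr().join("\n")   // incompatibilities and warnings are written to stderr
```
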
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy b/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
new file mode 100644
index 0000000..bc2a3b2
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
@@ -0,0 +1,275 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime
+
+import groovy.io.FileType
+import org.junit.Assert
+import org.apache.bigtop.itest.shell.*
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.Parameterized
+import org.junit.runners.Parameterized.Parameters
+
+import java.util.regex.Matcher
+import java.util.regex.Pattern
+
+/**
+ * Check all expected environment
+ * Tests are constructed dynamically, using external DSL to define
+ * - test name
+ * - test type
+ * - command to execute the test
+ * - expected pattern of the output
+ */
+@RunWith(Parameterized.class)
+public class TestSpecsRuntime {
+  private String testName
+  private String type
+  private Map arguments
+
+  private static ENV = System.getenv()
+
+  @Parameters(name="{0}")
+  public static Collection<Object[]> allTests() {
+    List<Object[]> specs = [];
+
+    config.specs.tests.each { test ->
+      specs.add([test.value.name, test.value.type, test.value.arguments] as Object[])
+    }
+    return specs
+  }
+
+  public TestSpecsRuntime (String testName, String type, Map arguments) {
+    this.testName = testName
+    this.type = type
+    this.arguments = arguments
+  }
+
+  public static final String testsList = System.properties['test.resources.dir'] ?:
+      "${System.properties['buildDir']}/resources/test"
+  def final static config = new ConfigSlurper().parse(new URL("file:${getTestConfigName()}"))
+
+  private static String getTestConfigName() {
+    return "$testsList/testRuntimeSpecConf.groovy";
+  }
+
+  private Map getEnvMap(String command) {
+    def envMap = [:]
+    Shell sh = new Shell()
+    def envvars = sh.exec(command).getOut()
+    if (sh.getRet() == 0) {
+      envvars.each {
+        def match = it =~ /(?<variable>[^=]+)='(?<value>[^']+)'$/
+        if ( match.matches() ) {
+          envMap[match.group('variable')] = match.group('value')
+        }
+      }
+    }
+    return envMap
+  }
+
+  private String getEnv(String name, String cmd) {
+    String value = ENV[name]
+    if (value == null) {
+       value = getEnvMap(cmd)[name]
+    }
+    return value
+  }
+
+  @Test
+  public void testAll() {
+    switch (type) {
+      case 'shell':
+        Shell sh = new Shell()
+        def output = sh.exec(arguments['command']).getOut().join("\n")
+        int actualResult = sh.getRet()
+        int expectedResult = arguments['expectedResult'] ? arguments['expectedResult'] : 0 // use 0 as default success code
+        Assert.assertTrue("${testName} fail: ${arguments['message']} - '${arguments['command']}' returned ${actualResult} instead of ${expectedResult}",
+            actualResult == expectedResult)
+        break
+
+      case 'envdir':
+        def var = arguments['variable']
+        def isPathRelative = arguments['relative']
+        def pathString = getEnv(var, arguments['envcmd'])
+        Assert.assertTrue("${testName} fail: environment variable ${var} does not exist", pathString != null )
+
+        if ( arguments['pattern'] ) {
+            Assert.assertTrue("${testName} fail: $pathString doesn't contain expected pattern",
+                pathString ==~ /${arguments['pattern']}/)
+        }
+
+        def pathFile = new File(pathString)
+        if ( isPathRelative ) {
+            Assert.assertFalse("${testName} fail: ${pathString} is not relative", pathFile.isAbsolute() )
+        } else {
+            if (!arguments['donotcheckexistance']) {
+              Assert.assertTrue("${testName} fail: ${pathString} does not exist", pathFile.exists() )
+              Assert.assertTrue("${testName} fail: ${pathString} is not directory", pathFile.isDirectory() )
+            }
+        }
+        break
+
+      case 'dirstruct':
+        def expectedFiles = []
+        new File("${testsList}", "${arguments['referenceList']}").eachLine { line ->
+           expectedFiles << ~line
+        }
+        def baseDirEnv = getEnv(arguments['baseDirEnv'], arguments['envcmd'])
+        Assert.assertNotNull("${baseDirEnv} has to be set for the test to continue",
+          baseDirEnv)
+        def root = new File(baseDirEnv)
+        def actualFiles = []
+        def missingFiles = []
+        if ( ! root.exists() ) {
+          Assert.fail("${testName} fail: ${baseDirEnv} does not exist!");
+        }
+
+        root.eachFileRecurse(FileType.ANY) { file ->
+          def relPath = new File( root.toURI().relativize( file.toURI() ).toString() ).path
+          actualFiles << relPath
+        }
+
+        expectedFiles.each { wantFile ->
+          def ok = false
+          for (def x : actualFiles) {
+            if (x =~ wantFile) {
+              ok = true
+              break
+            }
+          }
+          if (!ok) {
+            missingFiles << wantFile
+          }
+        }
+
+        Assert.assertTrue("${testName} fail: Directory structure for ${baseDirEnv} does not match reference. Missing files: ${missingFiles} ",
+          missingFiles.size() == 0)
+        break
+
+      case 'dircontent':
+        def expectedFiles = []
+        new File("${testsList}", "${arguments['referenceList']}").eachLine { line ->
+          expectedFiles << ~line
+        }
+
+        def baseDir = getEnv(arguments['baseDirEnv'], arguments['envcmd'])
+        def subDir = arguments['subDir']
+        if (!subDir && arguments['subDirEnv']) {
+          subDir = getEnv(arguments['subDirEnv'], arguments['envcmd'])
+        }
+
+        def dir = null
+        if (subDir) {
+          dir = new File(baseDir, subDir)
+        } else {
+          dir = new File(baseDir)
+        }
+        Assert.assertNotNull("Directory has to be set for the test to continue", dir)
+
+        def actualFiles = []
+        if (dir.exists()) {
+          dir.eachFile FileType.FILES, { file ->
+            def relPath = new File( dir.toURI().relativize( file.toURI() ).toString() ).path
+            actualFiles << relPath
+          }
+        }
+
+        def missingList = []
+        for (def wantFile : expectedFiles) {
+          def ok = false
+          for (def haveFile : actualFiles) {
+            if (haveFile =~ wantFile) {
+              ok = true
+              break
+            }
+          }
+          if (! ok) {
+            missingList << wantFile
+          }
+        }
+
+        def extraList = []
+        for (def haveFile : actualFiles) {
+          def ok = false
+          for (def wantFile : expectedFiles) {
+            if (haveFile =~ wantFile) {
+              ok = true
+              break
+            }
+          }
+          if (! ok) {
+            extraList << haveFile
+          }
+        }
+
+        def commonFiles = actualFiles.intersect(expectedFiles)
+        Assert.assertTrue("${testName} fail: Directory content for ${dir.path} does not match reference. Missing files: ${missingList}. Extra files: ${extraList}",
+           missingList.size() == 0 && extraList.size() == 0)
+        break
+      case 'hadoop_tools':
+        def toolsPathStr = getEnv("HADOOP_TOOLS_PATH", "hadoop envvars")
+        Assert.assertNotNull("${testName} fail: HADOOP_TOOLS_PATH environment variable should be set", toolsPathStr)
+
+        def toolsPath = new File(toolsPathStr)
+        Assert.assertTrue("${testName} fail: HADOOP_TOOLS_PATH must be an absolute path.", toolsPath.isAbsolute())
+
+        Shell sh = new Shell()
+        def classPath = sh.exec("hadoop classpath").getOut().join("\n")
+        Assert.assertTrue("${testName} fail: Failed to retrieve hadoop's classpath", sh.getRet()==0)
+
+        Assert.assertFalse("${testName} fail: The entire '${toolsPath}' path should not be included in the Hadoop classpath",
+          classPath.split(File.pathSeparator).any {
+            new File(it).getCanonicalPath() =~ /^${toolsPath}\/?\*/
+          }
+        )
+        break
+      case 'api_examination':
+        def basedir = getEnv(arguments['baseDirEnv'], arguments['envcmd'])
+        def libdir = getEnv(arguments['libDir'], arguments['envcmd'])
+
+        def dir = new File(basedir + "/" + libdir)
+        Assert.assertTrue("Expected " + dir.getPath() + " to be a directory", dir.isDirectory())
+        def pattern = Pattern.compile(arguments['jar'] + "-[0-9]+.*\\.jar")
+        def String[] jars = dir.list(new FilenameFilter() {
+          @Override
+          boolean accept(File d, String name) {
+            Matcher matcher = pattern.matcher(name)
+            return (matcher.matches() && !name.contains("test"))
+          }
+        })
+        Assert.assertEquals("Expected only one jar, but got " + jars.join(", "), 1, jars.length)
+        def jar = dir.getAbsolutePath() + "/" + jars[0]
+
+        def examinerJar = System.properties['odpi.test.hive.hcat.job.jar']
+        def resourceFile = System.properties['test.resources.dir']+ "/" + arguments['resourceFile']
+        Shell sh = new Shell()
+        def results = sh.exec("hadoop jar " + examinerJar + " org.odpi.specs.runtime.hadoop.ApiExaminer -c " + resourceFile + " -j " + jar).getErr()
+        int rc = sh.getRet()
+        Assert.assertEquals("Expected command to succeed, but got return code " + rc, 0, rc)
+        if (results.size() > 0) {
+          System.out.println("Received report for jar " + arguments['jar'] + results.join("\n"))
+        }
+        break;
+
+
+      default:
+        break
+    }
+  }
+}

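The 'api_examination' branch at the end of the switch above expects five keys in its arguments block. A hedged sketch of such an entry follows; as with the earlier example it sits under specs { tests { ... } }, and the entry name and environment-variable values are illustrative rather than copied from testRuntimeSpecConf.groovy (only the resource file name appears in this commit's test resources).

```
'HADOOP_API_COMMON' {                                // hypothetical entry name
  name = 'HADOOP_API_COMMON'
  type = 'api_examination'
  arguments {
    baseDirEnv = 'HADOOP_COMMON_HOME'                // env var resolved via getEnv(...)
    envcmd = 'hadoop envvars'                        // command used to look up unset env vars
    libDir = 'HADOOP_COMMON_LIB_JARS_DIR'            // env var naming the lib subdirectory
    jar = 'hadoop-common'                            // prefix matched against <jar>-<version>.jar
    resourceFile = 'hadoop-common-2.7.3-api-report.json'
  }
}
```
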
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
new file mode 100644
index 0000000..3e56224
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hive;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.exec.CommandLine;
+import org.apache.commons.exec.DefaultExecuteResultHandler;
+import org.apache.commons.exec.DefaultExecutor;
+import org.apache.commons.exec.ExecuteException;
+import org.apache.commons.exec.ExecuteWatchdog;
+import org.apache.commons.exec.Executor;
+import org.apache.commons.exec.PumpStreamHandler;
+import org.apache.commons.exec.environment.EnvironmentUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+public class HiveHelper {
+	
+	private static final Log LOG = LogFactory.getLog(HiveHelper.class.getName());
+
+	public static Map<String, String> execCommand(CommandLine commandline) {
+		return execCommand(commandline, null);
+	}
+
+	public static Map<String, String> execCommand(CommandLine commandline,
+																								Map<String, String> envVars) {
+		
+		System.out.println("Executing command:");
+		System.out.println(commandline.toString());
+		Map<String, String> env = null;
+		Map<String, String> entry = new HashMap<String, String>();
+		try {
+			env = EnvironmentUtils.getProcEnvironment();
+		} catch (IOException e1) {
+			// TODO Auto-generated catch block
+			LOG.debug("Failed to get process environment: "+ e1.getMessage());
+			e1.printStackTrace();
+		}
+		if (envVars != null) {
+			for (String key : envVars.keySet()) {
+				env.put(key, envVars.get(key));
+			}
+		}
+
+		DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
+		ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+		PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream);
+		ExecuteWatchdog watchdog = new ExecuteWatchdog(60*10000);
+		Executor executor = new DefaultExecutor();
+		executor.setExitValue(1);
+		executor.setWatchdog(watchdog);
+		executor.setStreamHandler(streamHandler);
+		try {
+			executor.execute(commandline, env, resultHandler);
+		} catch (ExecuteException e) {
+			// TODO Auto-generated catch block
+			LOG.debug("Failed to execute command with exit value: "+ String.valueOf(resultHandler.getExitValue()));
+			LOG.debug("outputStream: "+ outputStream.toString());
+			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+			entry.put("outputStream", outputStream.toString() + e.getMessage());
+			e.printStackTrace();
+			return entry;
+		} catch (IOException e) {
+			// TODO Auto-generated catch block
+			LOG.debug("Failed to execute command with exit value: "+ String.valueOf(resultHandler.getExitValue()));
+			LOG.debug("outputStream: "+ outputStream.toString());
+			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+			entry.put("outputStream", outputStream.toString() + e.getMessage());
+			e.printStackTrace();
+			return entry;
+		}
+		
+		try {
+			resultHandler.waitFor();
+			/*System.out.println("Command output: "+outputStream.toString());*/
+			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+			entry.put("outputStream", outputStream.toString());
+			return entry;
+		} catch (InterruptedException e) {
+			// TODO Auto-generated catch block
+			/*System.out.println("Command output: "+outputStream.toString());*/
+			LOG.debug("exitValue: "+ String.valueOf(resultHandler.getExitValue()));
+			LOG.debug("outputStream: "+ outputStream.toString());
+			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+			entry.put("outputStream", outputStream.toString());
+			e.printStackTrace();		
+			return entry;
+		}
+	}
+	
+	protected static String getProperty(String property, String description) {
+		String val = System.getProperty(property);
+		if (val == null) {
+			throw new RuntimeException("You must set the property " + property + " with " +
+				description);
+		}
+		LOG.debug(description + " is " + val);
+		return val;
+	 }
+	
+
+}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
new file mode 100644
index 0000000..7512dab
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hive;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.util.Properties;
+
+public class JdbcConnector {
+  private static final Log LOG = LogFactory.getLog(JdbcConnector.class.getName());
+
+  protected static final String URL = "odpi.test.hive.jdbc.url";
+  protected static final String USER = "odpi.test.hive.jdbc.user";
+  protected static final String PASSWD = "odpi.test.hive.jdbc.password";
+  protected static final String LOCATION = "odpi.test.hive.location";
+  protected static final String METASTORE_URL = "odpi.test.hive.metastore.url";
+  protected static final String TEST_THRIFT = "odpi.test.hive.thrift.test";
+  protected static final String TEST_HCATALOG = "odpi.test.hive.hcatalog.test";
+  protected static final String HIVE_CONF_DIR = "odpi.test.hive.conf.dir";
+  protected static final String HADOOP_CONF_DIR = "odpi.test.hadoop.conf.dir";
+
+  protected static Connection conn;
+
+  @BeforeClass
+  public static void connectToJdbc() throws SQLException {
+    // Assume they've put the URL for the JDBC driver in an environment variable.
+    String jdbcUrl = getProperty(URL, "the JDBC URL");
+    String jdbcUser = getProperty(USER, "the JDBC user name");
+    String jdbcPasswd = getProperty(PASSWD, "the JDBC password");
+
+    Properties props = new Properties();
+    props.put("user", jdbcUser);
+    if (!jdbcPasswd.equals("")) props.put("password", jdbcPasswd);
+    conn = DriverManager.getConnection(jdbcUrl, props);
+  }
+
+  @AfterClass
+  public static void closeJdbc() throws SQLException {
+    if (conn != null) conn.close();
+  }
+
+  protected static String getProperty(String property, String description) {
+    String val = System.getProperty(property);
+    if (val == null) {
+      throw new RuntimeException("You must set the property " + property + " with " +
+          description);
+    }
+    LOG.debug(description + " is " + val);
+    return val;
+  }
+
+  protected static boolean testActive(String property, String description) {
+    String val = System.getProperty(property, "true");
+    LOG.debug(description + " is " + val);
+    return Boolean.valueOf(val);
+  }
+
+}
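
For reference, a minimal sketch (not part of this patch) of how a test class can build
on the shared connection that JdbcConnector manages in its @BeforeClass/@AfterClass
methods. The class name and query below are illustrative; the odpi.test.hive.jdbc.*
properties listed above must be supplied as -D system properties for the connection
to be established.

package org.odpi.specs.runtime.hive;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import org.junit.Assert;
import org.junit.Test;

// Illustrative only: inherits the static conn opened by JdbcConnector.
public class ExampleJdbcTest extends JdbcConnector {
  @Test
  public void showDatabases() throws SQLException {
    try (Statement stmt = conn.createStatement();
         ResultSet rs = stmt.executeQuery("SHOW DATABASES")) {
      Assert.assertTrue("Expected at least one database", rs.next());
    }
  }
}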

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
new file mode 100644
index 0000000..578621a
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
@@ -0,0 +1,201 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hive;
+
+import org.apache.commons.exec.CommandLine;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.FileNotFoundException;
+import java.io.PrintWriter;
+import java.util.Map;
+public class TestBeeline {
+	
+	public static final Log LOG = LogFactory.getLog(TestBeeline.class.getName());
+	
+	private static final String URL = "odpi.test.hive.jdbc.url";
+	private static final String USER = "odpi.test.hive.jdbc.user";
+	private static final String PASSWD = "odpi.test.hive.jdbc.password";
+	
+	private static Map<String, String> results;
+	private static String beelineUrl; 
+	private static String beelineUser;
+	private static String beelinePasswd;
+	
+	//creating beeline base command with username and password as per inputs
+	private static CommandLine beelineBaseCommand = new CommandLine("beeline");
+
+	@BeforeClass
+	public static void initialSetup(){
+		TestBeeline.beelineUrl = System.getProperty(URL);
+		TestBeeline.beelineUser = System.getProperty(USER);
+		TestBeeline.beelinePasswd =System.getProperty(PASSWD);
+
+		if (beelineUser != null && !beelineUser.isEmpty() && beelinePasswd != null && !beelinePasswd.isEmpty())
+		{ 
+			beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd);
+		}
+		else if (beelineUser != null && !beelineUser.isEmpty())
+		{ 
+			beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser);
+		}
+		else {
+			beelineBaseCommand.addArgument("-u").addArgument(beelineUrl);
+		}
+		LOG.info("URL is " + beelineUrl); 
+		LOG.info("User is " + beelineUser);
+		LOG.info("Passwd is " + beelinePasswd); 
+		LOG.info("Passwd is null " + (beelinePasswd == null));
+	}
+
+	@Test
+	public void checkBeeline() {
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline -u FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+	}
+	
+	@Test
+	public void checkBeelineConnect(){
+		try(PrintWriter out = new PrintWriter("connect.url")){ out.println("!connect " + beelineUrl+" "+beelineUser+" "+beelinePasswd); out.println("!quit"); } 
+		catch (FileNotFoundException e1) {
+			e1.printStackTrace();
+		}
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -f connect.url",false));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline !connect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception") );  
+	}
+	
+	@Test
+	public void checkBeelineHelp(){
+		results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("--help"));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline --help FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("display this message" ) && consoleMsg.contains("usage: java org.apache.hive.cli.beeline.beeline") && !consoleMsg.contains("exception"));
+	}
+	
+	@Test
+	public void checkBeelineQueryExecFromCmdLine(){
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive;"));
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
+		}
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline -e FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+	}
+	
+	@Test
+	public void checkBeelineQueryExecFromFile() throws FileNotFoundException{
+		
+		try(PrintWriter out = new PrintWriter("beeline-f1.sql")){ out.println("SHOW DATABASES;"); }
+		try(PrintWriter out = new PrintWriter("beeline-f2.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("beeline-f3.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("beeline-f4.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); }
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false));
+		
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f2.sql",false));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f3.sql",false));
+		}
+		
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false));
+
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline -f FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f4.sql",false));	
+	}
+	
+	@Test
+	public void checkBeelineInitFile() throws FileNotFoundException{
+
+		try(PrintWriter out = new PrintWriter("beeline-i1.sql")){ out.println("SHOW DATABASES;"); }
+		try(PrintWriter out = new PrintWriter("beeline-i2.sql")){ out.println("CREATE DATABASE odpi_runtime_beeline_init;"); }
+		try(PrintWriter out = new PrintWriter("beeline-i3.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); out.println("CREATE DATABASE odpi_runtime_beeline_init;"); }
+		try(PrintWriter out = new PrintWriter("beeline-i4.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); }
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false));
+	
+		if(!results.get("outputStream").contains("odpi_runtime_beeline_init")){
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i2.sql",false));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i3.sql",false));
+		}
+		
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline -i FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_beeline_init") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i4.sql",false));	
+	}
+	
+	@Test
+	public void checkBeelineHiveVar() throws FileNotFoundException{
+
+		try(PrintWriter out = new PrintWriter("beeline-hv1.sql")){ out.println("SHOW DATABASES;"); }
+		try(PrintWriter out = new PrintWriter("beeline-hv2.sql")){ out.println("CREATE DATABASE ${db};"); }
+		try(PrintWriter out = new PrintWriter("beeline-hv3.sql")){ out.println("DROP DATABASE ${db};"); out.println("CREATE DATABASE ${db};"); }
+		try(PrintWriter out = new PrintWriter("beeline-hv4.sql")){ out.println("DROP DATABASE ${db};"); }
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false));
+	
+		if(!results.get("outputStream").contains("odpi_runtime_beeline_hivevar")){
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv2.sql",false));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv3.sql",false));
+		}
+		
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline --hivevar FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_beeline_hivevar") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv4.sql",false));		 
+	}
+	
+	@Test
+	public void checkBeelineFastConnect(){
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--fastConnect=false"));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline --fastConnect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("set fastconnect to true to skip"));
+	}
+
+	@Test
+	public void checkBeelineVerbose(){
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--verbose=true"));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline --verbose FAILED." +results.get("outputStream"), true, consoleMsg.contains("issuing: !connect jdbc:hive2:") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+	}
+	
+	@Test
+	public void checkBeelineShowHeader(){
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--showHeader=false").addArgument("-e").addArgument("SHOW DATABASES;"));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline --showHeader FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("default")&&!consoleMsg.contains("database_name") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+	}
+
+	@AfterClass
+	public static void cleanup() throws FileNotFoundException {
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf beeline*.sql", false));
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf connect.url", false));
+	}
+}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
new file mode 100644
index 0000000..2b70909
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
@@ -0,0 +1,213 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hive;
+
+import java.io.FileNotFoundException;
+import java.io.PrintWriter;
+import java.util.Map;
+
+import org.apache.commons.exec.CommandLine;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.AfterClass;
+import org.junit.Assert;
+
+public class TestCLI {
+	
+	static Map<String, String> results;
+	static String db = "javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=odpi_metastore_db;create=true";
+	
+	@BeforeClass
+	public static void setup(){
+		
+		results = HiveHelper.execCommand(new CommandLine("which").addArgument("hive"));
+		Assert.assertEquals("Hive is not in the current path.", 0, Integer.parseInt(results.get("exitValue")));
+	}
+	
+	@Test
+	public void help(){		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-H"));
+		//LOG.info(results.get("exitValue"));
+		Assert.assertEquals("Error in executing 'hive -H'", 2, Integer.parseInt(results.get("exitValue")));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--help"));
+		Assert.assertEquals("Error in executing 'hive --help'", 0, Integer.parseInt(results.get("exitValue")));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-U"));
+		Assert.assertEquals("Unrecognized option should exit 1.", 1, Integer.parseInt(results.get("exitValue")));
+	}
+	 
+	@Test
+	public void sqlFromCmdLine(){
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+		}
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+	}
+	
+	@Test
+	public void sqlFromFiles() throws FileNotFoundException{
+		try(PrintWriter out = new PrintWriter("hive-f1.sql")){ out.println("SHOW DATABASES;"); }
+		try(PrintWriter out = new PrintWriter("hive-f2.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("hive-f3.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("hive-f4.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); }
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f1.sql").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f2.sql").addArgument("--hiveconf").addArgument(db));
+			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f3.sql").addArgument("--hiveconf").addArgument(db));
+			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+		}
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f4.sql").addArgument("--hiveconf").addArgument(db));
+	}
+	
+	@Test
+	public void silent() {
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-S").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("-S option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--silent").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("--silent option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
+	}
+	
+	@Test
+	public void verbose(){
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-v").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("-v option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--verbose").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("--verbose option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));		
+	}
+	
+	@Test
+	public void initialization() throws FileNotFoundException{
+		try(PrintWriter out = new PrintWriter("hive-init1.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("hive-init2.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init1.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", 0, Integer.parseInt(results.get("exitValue")));
+			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init2.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
+			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		}
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+	}
+	
+	@Test
+	public void database(){
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		}
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive_1234").addArgument("-e").addArgument("CREATE TABLE odpi ( MYID INT );").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("Non-existent database returned with wrong exit code: "+Integer.parseInt(results.get("exitValue")), 88, Integer.parseInt(results.get("exitValue")));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("CREATE TABLE odpi ( MYID INT );").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("DESCRIBE odpi").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("Failed to get expected column after creating odpi table using --database argument.", true, results.get("outputStream").contains("myid"));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("DROP TABLE odpi").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+	}
+	
+	@Test
+	public void hiveConf(){
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("The --hiveconf option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+		Assert.assertEquals("The -hiveconf variant option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
+	}
+	
+	@Test
+	public void variableSubstitution() throws FileNotFoundException{
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		}
+		try(PrintWriter out = new PrintWriter("hive-define.sql")){ out.println("show ${A};"); out.println("quit;"); }
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive -d A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false));		
+		Assert.assertEquals("The hive -d A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+		Assert.assertEquals("The hive -d A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --define A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false));		
+		Assert.assertEquals("The hive --define A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+		Assert.assertEquals("The hive --define A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+	}
+	
+	@Test
+	public void hiveVar() throws FileNotFoundException{
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		}
+		try(PrintWriter out = new PrintWriter("hive-var.sql")){ out.println("show ${A};"); out.println("quit;"); }
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hivevar A=DATABASES --hiveconf '"+db+"' < hive-var.sql", false));		
+		Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+		Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		
+		try(PrintWriter out = new PrintWriter("hiveconf-var.sql")){ out.println("show ${hiveconf:A};"); out.println("quit;"); }
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hiveconf A=DATABASES --hiveconf '"+db+"' < hiveconf-var.sql", false));		
+		Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
+		Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
+		
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+	}
+	
+	@AfterClass
+	public static void cleanup(){
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-f*.sql", false));
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-init*.sql", false));
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-define.sql", false));
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-var.sql", false));
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hiveconf-var.sql", false));
+	}
+	 
+}


[29/50] [abbrv] bigtop git commit: Update hadoop-common-jar.list (cherry picked from commit 06615fec9bb8a4a3582f7db3254a09da2f230149)

Posted by rv...@apache.org.
Update hadoop-common-jar.list
(cherry picked from commit 06615fec9bb8a4a3582f7db3254a09da2f230149)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/fe2d31d8
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/fe2d31d8
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/fe2d31d8

Branch: refs/heads/master
Commit: fe2d31d8dbe5d0acff4dafe588c4bf043664d419
Parents: 4f19c15
Author: Raj Desai <rd...@us.ibm.com>
Authored: Wed Mar 8 15:42:26 2017 -0800
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:15 2017 -0700

----------------------------------------------------------------------
 .../spec-tests/runtime/src/test/resources/hadoop-common-jar.list | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/fe2d31d8/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-jar.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-jar.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-jar.list
index 4b687ba..2edbd0f 100644
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-jar.list
+++ b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-jar.list
@@ -17,7 +17,7 @@ curator-client-2\.7\.1[\.\-_].*jar
 jets3t-0\.9\.0[\.\-_].*jar
 commons-net-3\.1[\.\-_].*jar
 jaxb-impl-2\.2\.3-1[\.\-_].*jar
-httpclient-4\.2\.5[\.\-_].*jar
+httpclient-4\.[0-9]\.[0-9][\.\-_].*jar
 apacheds-kerberos-codec-2\.0\.0-M15[\.\-_].*jar
 commons-cli-1\.2[\.\-_].*jar
 log4j-1\.2\.17[\.\-_].*jar
@@ -47,7 +47,7 @@ commons-lang-2\.6[\.\-_].*jar
 jetty-util-6\.1\.26[\.\-_].*jar
 jsr305-3\.0\.0[\.\-_].*jar
 protobuf-java-2\.5\.0[\.\-_].*jar
-httpcore-4\.2\.5[\.\-_].*jar
+httpcore-4\.[0-9]\.[0-9][\.\-_].*jar
 commons-io-2\.4[\.\-_].*jar
 activation-1\.1[\.\-_].*jar
 jersey-json-1\.9[\.\-_].*jar
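
For context, the entries in these .list resources are regular expressions, and the change
above relaxes the pinned httpclient/httpcore versions to any 4.x.y build. A minimal sketch
of the intended matching behaviour, assuming each line is applied as a java.util.regex
pattern to the jar file names found on the cluster (the file names below are hypothetical):

import java.util.regex.Pattern;

public class JarListRegexDemo {
  public static void main(String[] args) {
    // The relaxed entry from the list above.
    Pattern p = Pattern.compile("httpclient-4\\.[0-9]\\.[0-9][\\.\\-_].*jar");
    System.out.println(p.matcher("httpclient-4.2.5.jar").matches()); // true: the old pinned version
    System.out.println(p.matcher("httpclient-4.5.2.jar").matches()); // true: now also accepted
    System.out.println(p.matcher("httpclient-3.1.jar").matches());   // false: wrong major version
  }
}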


[25/50] [abbrv] bigtop git commit: ODPI-63 Start of a solution. Code compiles and seems to run against hadoop-common for Apache and HDP. Haven't tried it past that. Lots of work still to connect it into bigtop framework.

Posted by rv...@apache.org.
ODPI-63 Start of a solution.  Code compiles and seems to run against hadoop-common for Apache and HDP.  Haven't tried it past that.  Lots of work still to connect it into bigtop framework.

(cherry picked from commit 51cac9914312aa191a1748eb44b4c5a026b7f4bf)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/29eebd0d
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/29eebd0d
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/29eebd0d

Branch: refs/heads/master
Commit: 29eebd0d8d627eb0a3e219e259a7dd4cd368f924
Parents: f0e2e03
Author: Alan Gates <al...@gmail.com>
Authored: Tue Mar 7 17:22:10 2017 -0800
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:14 2017 -0700

----------------------------------------------------------------------
 bigtop-tests/spec-tests/runtime/build.gradle    |   1 +
 .../odpi/specs/runtime/hadoop/ApiExaminer.java  | 504 +++++++++++++++++++
 2 files changed, 505 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/29eebd0d/bigtop-tests/spec-tests/runtime/build.gradle
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/build.gradle b/bigtop-tests/spec-tests/runtime/build.gradle
index a88a3b6..0eadd96 100644
--- a/bigtop-tests/spec-tests/runtime/build.gradle
+++ b/bigtop-tests/spec-tests/runtime/build.gradle
@@ -40,6 +40,7 @@ dependencies {
   testCompile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-common', version: '2.7.2'
   testCompile group: 'org.apache.hadoop', name: 'hadoop-hdfs', version: '2.7.2'
   testCompile group: 'org.apache.hive', name: 'hive-exec', version: '1.2.1'
+    testCompile "junit:junit:4.11"
   if (System.env.HADOOP_CONF_DIR) testRuntime files(System.env.HADOOP_CONF_DIR)
 }
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/29eebd0d/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
new file mode 100644
index 0000000..c49be13
--- /dev/null
+++ b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
@@ -0,0 +1,504 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hadoop;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Options;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.codehaus.jackson.annotate.JsonIgnore;
+import org.codehaus.jackson.map.ObjectMapper;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * A tool that generates API conformance tests for Hadoop libraries
+ */
+public class ApiExaminer {
+
+  private static final Log LOG = LogFactory.getLog(ApiExaminer.class.getName());
+
+  static private Set<String> unloadableClasses;
+  //static private List<String> jarsWeCareAbout;
+
+  private List<String> errors;
+  private List<String> warnings;
+
+  static {
+    unloadableClasses = new HashSet<>();
+    unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsMapping");
+    unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping");
+    unloadableClasses.add("org.apache.hadoop.io.compress.lz4.Lz4Compressor");
+    unloadableClasses.add("org.apache.hadoop.record.compiler.ant.RccTask");
+
+    /*
+    jarsWeCareAbout = new ArrayList<>();
+    jarsWeCareAbout.add("hadoop-hdfs");
+    jarsWeCareAbout.add("hadoop-yarn-common");
+    jarsWeCareAbout.add("hadoop-yarn-client");
+    jarsWeCareAbout.add("hadoop-yarn-api");
+    jarsWeCareAbout.add("hadoop-mapreduce-client-app");
+    jarsWeCareAbout.add("hadoop-mapreduce-client-common");
+    jarsWeCareAbout.add("hadoop-mapreduce-client-core");
+    jarsWeCareAbout.add("hadoop-mapreduce-client-hs");
+    jarsWeCareAbout.add("hadoop-mapreduce-client-hs-plugins");
+    jarsWeCareAbout.add("hadoop-mapreduce-client-jobclient");
+    jarsWeCareAbout.add("hadoop-mapreduce-client-shuffle");
+    */
+  }
+
+  public static void main(String[] args) {
+    Options options = new Options();
+
+    options.addOption("c", "compare", true,
+        "Compare against a spec, argument is the json file containing spec");
+    options.addOption("h", "help", false, "You're looking at it");
+    options.addOption("j", "jar", true, "Jar to examine");
+    options.addOption("p", "prepare-spec", true,
+        "Prepare the spec, argument is the directory to write the spec to");
+
+    try {
+      CommandLine cli = new GnuParser().parse(options, args);
+
+      if (cli.hasOption('h')) {
+        usage(options);
+        return;
+      }
+
+      if ((!cli.hasOption('c') && !cli.hasOption('p')) ||
+          (cli.hasOption('c') && cli.hasOption('p'))) {
+        System.err.println("You must choose either -c or -p");
+        usage(options);
+        return;
+      }
+
+      if (!cli.hasOption('j')) {
+        System.err.println("You must specify the jar to prepare or compare");
+        usage(options);
+        return;
+      }
+
+      String jar = cli.getOptionValue('j');
+      ApiExaminer examiner = new ApiExaminer();
+
+      if (cli.hasOption('c')) {
+        examiner.compareAgainstStandard(cli.getOptionValue('c'), jar);
+      } else if (cli.hasOption('p')) {
+        examiner.prepareExpected(jar, cli.getOptionValue('p'));
+      }
+    } catch (Exception e) {
+      System.err.println("Received exception while processing");
+      e.printStackTrace();
+    }
+  }
+
+  private static void usage(Options options) {
+    HelpFormatter help = new HelpFormatter();
+    help.printHelp("api-examiner", options);
+
+  }
+
+  private ApiExaminer() {
+  }
+
+  private void prepareExpected(String jarFile, String outputDir) throws IOException,
+      ClassNotFoundException {
+    JarInfo jarInfo = new JarInfo(jarFile, this);
+    jarInfo.dumpToFile(new File(outputDir));
+  }
+
+  private void compareAgainstStandard(String json, String jarFile) throws IOException,
+      ClassNotFoundException {
+    errors = new ArrayList<>();
+    warnings = new ArrayList<>();
+    JarInfo underTest = new JarInfo(jarFile, this);
+    JarInfo standard = jarInfoFromFile(new File(json));
+    standard.compareAndReport(underTest);
+
+    if (errors.size() > 0) {
+      System.err.println("Found " + errors.size() + " incompatibilities:");
+      for (String error : errors) {
+        System.err.println(error);
+      }
+    }
+
+    if (warnings.size() > 0) {
+      System.err.println("Found " + warnings.size() + " possible issues: ");
+      for (String warning : warnings) {
+        System.err.println(warning);
+      }
+    }
+
+
+  }
+
+  private JarInfo jarInfoFromFile(File inputFile) throws IOException {
+    ObjectMapper mapper = new ObjectMapper();
+    JarInfo jarInfo = mapper.readValue(inputFile, JarInfo.class);
+    jarInfo.patchUpClassBackPointers(this);
+    return jarInfo;
+  }
+
+  private static class JarInfo {
+    String name;
+    String version;
+    ApiExaminer container;
+    Map<String, ClassInfo> classes;
+
+    // For use by Jackson
+    public JarInfo() {
+
+    }
+
+    JarInfo(String jarFile, ApiExaminer container) throws IOException, ClassNotFoundException {
+      this.container = container;
+      LOG.info("Processing jar " + jarFile);
+      File f = new File(jarFile);
+      Pattern pattern = Pattern.compile("(hadoop-[a-z\\-]+)-([0-9]\\.[0-9]\\.[0-9]).*");
+      Matcher matcher = pattern.matcher(f.getName());
+      if (!matcher.matches()) {
+        String msg = "Unable to determine name and version from " + f.getName();
+        LOG.error(msg);
+        throw new RuntimeException(msg);
+      }
+      name = matcher.group(1);
+      version = matcher.group(2);
+      classes = new HashMap<>();
+
+      JarFile jar = new JarFile(jarFile);
+      Enumeration<JarEntry> entries = jar.entries();
+      while (entries.hasMoreElements()) {
+        String name = entries.nextElement().getName();
+        if (name.endsWith(".class")) {
+          name = name.substring(0, name.length() - 6);
+          name = name.replace('/', '.');
+          if (!unloadableClasses.contains(name)) {
+            LOG.debug("Processing class " + name);
+            Class<?> clazz = Class.forName(name);
+            if (clazz.getAnnotation(InterfaceAudience.Public.class) != null &&
+                clazz.getAnnotation(InterfaceStability.Stable.class) != null) {
+              classes.put(name, new ClassInfo(this, clazz));
+            }
+          }
+        }
+      }
+    }
+
+    public String getName() {
+      return name;
+    }
+
+    public void setName(String name) {
+      this.name = name;
+    }
+
+    public String getVersion() {
+      return version;
+    }
+
+    public void setVersion(String version) {
+      this.version = version;
+    }
+
+    public Map<String, ClassInfo> getClasses() {
+      return classes;
+    }
+
+    public void setClasses(Map<String, ClassInfo> classes) {
+      this.classes = classes;
+    }
+
+    void compareAndReport(JarInfo underTest) {
+      Set<ClassInfo> underTestClasses = new HashSet<>(underTest.classes.values());
+      for (ClassInfo classInfo : classes.values()) {
+        if (underTestClasses.contains(classInfo)) {
+          classInfo.compareAndReport(underTest.classes.get(classInfo.name));
+          underTestClasses.remove(classInfo);
+        } else {
+          container.errors.add(underTest + " does not contain class " + classInfo);
+        }
+      }
+
+      if (underTestClasses.size() > 0) {
+        for (ClassInfo extra : underTestClasses) {
+          container.warnings.add(underTest + " contains extra class " + extra);
+        }
+      }
+    }
+
+    void dumpToFile(File outputDir) throws IOException {
+      File output = new File(outputDir, name + "-" + version + "-api-report.json");
+      ObjectMapper mapper = new ObjectMapper();
+      mapper.writeValue(output, this);
+    }
+
+    void patchUpClassBackPointers(ApiExaminer container) {
+      this.container = container;
+      for (ClassInfo classInfo : classes.values()) {
+        classInfo.setJar(this);
+        classInfo.patchUpBackMethodBackPointers();
+      }
+    }
+
+    @Override
+    public boolean equals(Object other) {
+      if (!(other instanceof JarInfo)) return false;
+      JarInfo that = (JarInfo)other;
+      return name.equals(that.name) && version.equals(that.version);
+    }
+
+    @Override
+    public String toString() {
+      return name + "-" + version;
+    }
+  }
+
+  private static class ClassInfo {
+    @JsonIgnore JarInfo jar;
+    String name;
+    Map<String, MethodInfo> methods;
+
+    // For use by Jackson
+    public ClassInfo() {
+
+    }
+
+    ClassInfo(JarInfo jar, Class<?> clazz) {
+      this.jar = jar;
+      this.name = clazz.getName();
+      methods = new HashMap<>();
+
+      for (Method method : clazz.getMethods()) {
+        if (method.getDeclaringClass().equals(clazz)) {
+          LOG.debug("Processing method " + method.getName());
+          MethodInfo mi = new MethodInfo(this, method);
+          methods.put(mi.toString(), mi);
+        }
+      }
+    }
+
+    public JarInfo getJar() {
+      return jar;
+    }
+
+    public void setJar(JarInfo jar) {
+      this.jar = jar;
+    }
+
+    public String getName() {
+      return name;
+    }
+
+    public void setName(String name) {
+      this.name = name;
+    }
+
+    public Map<String, MethodInfo> getMethods() {
+      return methods;
+    }
+
+    public void setMethods(Map<String, MethodInfo> methods) {
+      this.methods = methods;
+    }
+
+    void compareAndReport(ClassInfo underTest) {
+      // Make a copy so we can remove them as we match them, making it easy to find additional ones
+      Set<MethodInfo> underTestMethods = new HashSet<>(underTest.methods.values());
+      for (MethodInfo methodInfo : methods.values()) {
+        if (underTestMethods.contains(methodInfo)) {
+          methodInfo.compareAndReport(underTest.methods.get(methodInfo.toString()));
+          underTestMethods.remove(methodInfo);
+        } else {
+          jar.container.errors.add(underTest + " does not contain method " + methodInfo);
+        }
+      }
+
+      if (underTestMethods.size() > 0) {
+        for (MethodInfo extra : underTestMethods) {
+          jar.container.warnings.add(underTest + " contains extra method " + extra);
+        }
+      }
+    }
+
+    void patchUpBackMethodBackPointers() {
+      for (MethodInfo methodInfo : methods.values()) methodInfo.setContainingClass(this);
+    }
+
+    @Override
+    public boolean equals(Object other) {
+      if (!(other instanceof ClassInfo)) return false;
+      ClassInfo that = (ClassInfo)other;
+      return name.equals(that.name);  // Classes can be compared just on names
+    }
+
+    @Override
+    public int hashCode() {
+      return name.hashCode();
+    }
+
+    @Override
+    public String toString() {
+      return jar + " " + name;
+    }
+  }
+
+  private static class MethodInfo {
+    @JsonIgnore ClassInfo containingClass;
+    String name;
+    String returnType;
+    List<String> args;
+    Set<String> exceptions;
+
+    // For use by Jackson
+    public MethodInfo() {
+
+    }
+
+    MethodInfo(ClassInfo containingClass, Method method) {
+      this.containingClass = containingClass;
+      this.name = method.getName();
+      args = new ArrayList<>();
+      for (Class<?> argClass : method.getParameterTypes()) {
+        args.add(argClass.getName());
+      }
+      returnType = method.getReturnType().getName();
+      exceptions = new HashSet<>();
+      for (Class<?> exception : method.getExceptionTypes()) {
+        exceptions.add(exception.getName());
+      }
+    }
+
+    public ClassInfo getContainingClass() {
+      return containingClass;
+    }
+
+    public void setContainingClass(ClassInfo containingClass) {
+      this.containingClass = containingClass;
+    }
+
+    public String getName() {
+      return name;
+    }
+
+    public void setName(String name) {
+      this.name = name;
+    }
+
+    public String getReturnType() {
+      return returnType;
+    }
+
+    public void setReturnType(String returnType) {
+      this.returnType = returnType;
+    }
+
+    public List<String> getArgs() {
+      return args;
+    }
+
+    public void setArgs(List<String> args) {
+      this.args = args;
+    }
+
+    public Set<String> getExceptions() {
+      return exceptions;
+    }
+
+    public void setExceptions(Set<String> exceptions) {
+      this.exceptions = exceptions;
+    }
+
+    void compareAndReport(MethodInfo underTest) {
+      // Check to see if they've added or removed exceptions
+      // Make a copy so I can remove them as I check them off and easily find any that have been
+      // added.
+      Set<String> underTestExceptions = new HashSet<>(underTest.exceptions);
+      for (String exception : exceptions) {
+        if (underTest.exceptions.contains(exception)) {
+          underTestExceptions.remove(exception);
+        } else {
+          containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
+              underTest.containingClass + "." + name + " removes exception " + exception);
+        }
+      }
+      if (underTestExceptions.size() > 0) {
+        for (String underTestException : underTestExceptions) {
+          containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
+              underTest.containingClass + "." + name + " adds exception " + underTestException);
+        }
+      }
+    }
+
+    @Override
+    public boolean equals(Object other) {
+      if (!(other instanceof MethodInfo)) return false;
+      MethodInfo that = (MethodInfo)other;
+
+      return containingClass.equals(that.containingClass) && name.equals(that.name) &&
+          returnType.equals(that.returnType) && args.equals(that.args);
+    }
+
+    @Override
+    public int hashCode() {
+      return ((containingClass.hashCode() * 31 + name.hashCode()) * 31 + returnType.hashCode()) * 31 +
+          args.hashCode();
+    }
+
+    @Override
+    public String toString() {
+      StringBuilder buf = new StringBuilder(returnType)
+          .append(" ")
+          .append(name)
+          .append('(');
+      boolean first = true;
+      for (String arg : args) {
+        if (first) first = false;
+        else buf.append(", ");
+        buf.append(arg);
+      }
+      buf.append(")");
+      if (exceptions.size() > 0) {
+        buf.append(" throws ");
+        first = true;
+        for (String exception : exceptions) {
+          if (first) first = false;
+          else buf.append(", ");
+          buf.append(exception);
+        }
+      }
+      return buf.toString();
+    }
+  }
+}
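
For reference, a sketch (not part of this patch) of how the two modes of ApiExaminer fit
together. The jar and directory paths are hypothetical, the directory passed to -p must
already exist, and the examined jar plus its dependencies must be on the classpath since
classes are loaded with Class.forName.

package org.odpi.specs.runtime.hadoop;

// Illustrative only: paths are placeholders.
public class ApiExaminerDemo {
  public static void main(String[] args) {
    // 1. Prepare the expected-API spec from a reference release jar.
    //    Writes /tmp/api-specs/hadoop-common-2.7.3-api-report.json.
    ApiExaminer.main(new String[]{
        "-j", "/opt/reference/hadoop-common-2.7.3.jar",
        "-p", "/tmp/api-specs"});

    // 2. Compare a jar under test against that spec. Missing classes or methods
    //    are reported as errors, extra ones as warnings, both on stderr.
    ApiExaminer.main(new String[]{
        "-j", "/usr/lib/hadoop/hadoop-common-2.7.2.jar",
        "-c", "/tmp/api-specs/hadoop-common-2.7.3-api-report.json"});
  }
}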


[26/50] [abbrv] bigtop git commit: Update hadoop-mapreduce.list (cherry picked from commit ee5b1e4f6686029663d30f94d769b6a33968fe94)

Posted by rv...@apache.org.
Update hadoop-mapreduce.list
(cherry picked from commit ee5b1e4f6686029663d30f94d769b6a33968fe94)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/933a7700
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/933a7700
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/933a7700

Branch: refs/heads/master
Commit: 933a7700ffc46cc6c322bfc02cbd8f8fc844dc57
Parents: fe2d31d
Author: Raj Desai <rd...@us.ibm.com>
Authored: Wed Mar 8 15:54:01 2017 -0800
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:15 2017 -0700

----------------------------------------------------------------------
 .../spec-tests/runtime/src/test/resources/hadoop-mapreduce.list  | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/933a7700/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce.list
index b415afe..cc06d80 100644
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce.list
+++ b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce.list
@@ -31,7 +31,7 @@ hadoop-mapreduce-client-hs-plugins-2\.7\.[0-9][\.\-_].*jar
 hadoop-sls[\.\-_].*jar
 hadoop-ant[\.\-_].*jar
 netty-3\.6\.2\.Final[\.\-_].*jar
-httpcore-4\.2\.5[\.\-_].*jar
+httpcore-4\.[0-9]\.[0-9][\.\-_].*jar
 jsch-0\.1\.(4[2-9]|[5-9]\d)[\.\-_].*jar
 hadoop-mapreduce-client-jobclient[\.\-_].*jar
 hadoop-archives[\.\-_].*jar
@@ -85,7 +85,7 @@ hadoop-mapreduce-client-app-2\.7\.[0-9][\.\-_].*jar
 hadoop-datajoin[\.\-_].*jar
 apacheds-kerberos-codec-2\.0\.0-M15[\.\-_].*jar
 java-xmlbuilder-0\.4[\.\-_].*jar
-httpclient-4\.2\.5[\.\-_].*jar
+httpclient-4\.[0-9]\.[0-9][\.\-_].*jar
 hadoop-rumen-2\.7\.[0-9][\.\-_].*jar
 hadoop-mapreduce-client-core-2\.7\.[0-9][\.\-_].*jar
 guava-11\.0\.2[\.\-_].*jar


[22/50] [abbrv] bigtop git commit: Added shell scripts to make it easier to run, and resource files with expected results for ODPi 2.1.

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/4f19c159/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-2.7.3-api-report.json
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-2.7.3-api-report.json b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-2.7.3-api-report.json
new file mode 100644
index 0000000..6a6c7af
--- /dev/null
+++ b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-2.7.3-api-report.json
@@ -0,0 +1 @@
+{"name":"hadoop-common","version":"2.7.3","classes":{"org.apache.hadoop.record.RecordInput":{"name":"org.apache.hadoop.record.RecordInput","methods":{"long readLong(java.lang.String) throws java.io.IOException":{"name":"readLong","returnType":"long","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.lang.String) throws java.io.IOException":{"name":"endMap","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Buffer readBuffer(java.lang.String) throws java.io.IOException":{"name":"readBuffer","returnType":"org.apache.hadoop.record.Buffer","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"byte readByte(java.lang.String) throws java.io.IOException":{"name":"readByte","returnType":"byte","args":["java.lang.String"]
 ,"exceptions":["java.io.IOException"]},"float readFloat(java.lang.String) throws java.io.IOException":{"name":"readFloat","returnType":"float","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void startRecord(java.lang.String) throws java.io.IOException":{"name":"startRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"double readDouble(java.lang.String) throws java.io.IOException":{"name":"readDouble","returnType":"double","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.String readString(java.lang.String) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"int readInt(java.lang.String) throws java.io.IOException":{"name":"readInt","re
 turnType":"int","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"boolean readBool(java.lang.String) throws java.io.IOException":{"name":"readBool","returnType":"boolean","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startVector(java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startMap(java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.NullWritable":{"name":"org.apache.hadoop.io.NullWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.NullWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.NullWr
 itable"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.io.NullWritable get()":{"name":"get","returnType":"org.apache.hadoop.io.NullWritable","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.XmlRecordInput":{"name":"org.apache.hadoop.record.XmlRecordInput","methods":{"long readLong(java.lang.String) throws java.io.IOEx
 ception":{"name":"readLong","returnType":"long","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.lang.String) throws java.io.IOException":{"name":"endMap","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Buffer readBuffer(java.lang.String) throws java.io.IOException":{"name":"readBuffer","returnType":"org.apache.hadoop.record.Buffer","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"byte readByte(java.lang.String) throws java.io.IOException":{"name":"readByte","returnType":"byte","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"float readFloat(java.lang.String) throws java.io.IOException":{"name":"readFloat","returnType":"float","args":["java.lang.String"],"exceptions":["java.io.IOExcept
 ion"]},"void startRecord(java.lang.String) throws java.io.IOException":{"name":"startRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"double readDouble(java.lang.String) throws java.io.IOException":{"name":"readDouble","returnType":"double","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.String readString(java.lang.String) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"int readInt(java.lang.String) throws java.io.IOException":{"name":"readInt","returnType":"int","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"boolean readBool(java.lang.String) throws java.io.IOException":{"name":"readBool","returnType":"boolean","args":["java.
 lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startVector(java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startMap(java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.FileSystem":{"name":"org.apache.hadoop.fs.FileSystem","methods":{"org.apache.hadoop.security.token.Token getDelegationToken(java.lang.String) throws java.io.IOException":{"name":"getDelegationToken","returnType":"org.apache.hadoop.security.token.Token","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem get(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"get","returnType":"org.apache.hadoop.fs.FileSyst
 em","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"short getDefaultReplication(org.apache.hadoop.fs.Path)":{"name":"getDefaultReplication","returnType":"short","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"[B getXAttr(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"getXAttr","returnType":"[B","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"boolean setReplication(org.apache.hadoop.fs.Path, short) throws java.io.IOException":{"name":"setReplication","returnType":"boolean","args":["org.apache.hadoop.fs.Path","short"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.ContentSummary getContentSummary(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getContentSummary","returnType":"org.apache.hadoop.fs.ContentSummary","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream crea
 te(org.apache.hadoop.fs.Path, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; globStatus(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.PathFilter) throws java.io.IOException":{"name":"globStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.PathFilter"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileSystem; getChildFileSystems()":{"name":"getChildFileSystems","returnType":"[Lorg.apache.hadoop.fs.FileSystem;","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.ut
 il.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"java.lang.Class getFileSystemClass(java.lang.String, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"getFileSystemClass","returnType":"java.lang.Class","args":["java.lang.String","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem newInstance(java.net.URI, org.apache
 .hadoop.conf.Configuration, java.lang.String) throws java.lang.InterruptedException, java.io.IOException":{"name":"newInstance","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.net.URI","org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.util.Map getStatistics()":{"name":"getStatistics","returnType":"java.util.Map","args":[],"exceptions":[]},"org.apache.hadoop.fs.RemoteIterator listStatusIterator(org.apache.hadoop.fs.Path) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listStatusIterator","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FileSystem$Statistics getStatistics(java.lang.String, java.lang.Class)":{"name":"getStatistics","returnType":"org.apache.hadoop.fs.FileSystem$Statistics","args":["java.lang.String","java.lang.Class"],"excep
 tions":[]},"boolean isFile(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"isFile","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void renameSnapshot(org.apache.hadoop.fs.Path, java.lang.String, java.lang.String) throws java.io.IOException":{"name":"renameSnapshot","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsStatus getStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getStatus","return
 Type":"org.apache.hadoop.fs.FsStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path createSnapshot(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"createSnapshot","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listFiles(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listFiles","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"void copyToLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IO
 Exception":{"name":"copyToLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean areSymlinksEnabled()":{"name":"areSymlinksEnabled","returnType":"boolean","args":[],"exceptions":[]},"boolean createNewFile(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"createNewFile","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void removeAcl(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"removeAcl","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) th
 rows java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.BlockLocation; getFileBlockLocations(org.apache.hadoop.fs.Path, long, long) throws java.io.IOException":{"name":"getFileBlockLocations","returnType":"[Lorg.apache.hadoop.fs.BlockLocation;","args":["org.apache.hadoop.fs.Path","long","long"],"exceptions":["java.io.IOException"]},"boolean deleteOnExit(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"deleteOnExit","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream",
 "args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.LocalFileSystem getLocal(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"getLocal","returnType":"org.apache.hadoop.fs.LocalFileSystem","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void setDefaultUri(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"setDefaultUri","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"org.apache.hadoop.fs.permission.AclStatus getAclStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getAclStatus","returnType":"org.apache.hadoop.fs.permission.AclStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.FileSystem, org.apache.hadoo
 p.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"void printStatistics() throws java.io.IOException":{"name":"printStatistics","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void setOwner(org.apache.hadoop.fs.Path, java.lang.String, java.lang.String) throws java.io.IOException":{"name":"setOwner","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsServerDefaults getServerDefaults(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getServerDefaults","returnType":"org.apache.hadoop.fs.FsServerDefaults","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void moveFromLocalFile(org.apache.hadoop.fs
 .Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveFromLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"void deleteSnapshot(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"deleteSnapshot","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"long getDefaultBlockSize()":{"name":"getDefaultBlockSize","returnType":"long","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataInputSt
 ream open(org.apache.hadoop.fs.Path, int) throws java.io.IOException":{"name":"open","returnType":"org.apache.hadoop.fs.FSDataInputStream","args":["org.apache.hadoop.fs.Path","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsStatus getStatus() throws java.io.IOException":{"name":"getStatus","returnType":"org.apache.hadoop.fs.FsStatus","args":[],"exceptions":["java.io.IOException"]},"java.net.URI getUri()":{"name":"getUri","returnType":"java.net.URI","args":[],"exceptions":[]},"[Lorg.apache.hadoop.security.token.Token; addDelegationTokens(java.lang.String, org.apache.hadoop.security.Credentials) throws java.io.IOException":{"name":"addDelegationTokens","returnType":"[Lorg.apache.hadoop.security.token.Token;","args":["java.lang.String","org.apache.hadoop.security.Credentials"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getHomeDirectory()":{"name":"getHomeDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void comple
 teLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"completeLocalOutput","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean rename(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"rename","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileStatus getFileStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void enableSymlinks()":{"name":"enableSymlinks","returnType":"void","args":[],"exceptions":[]},"void moveToLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveToLocalFile","returnType":"void","args":["org.
 apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listCorruptFileBlocks(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listCorruptFileBlocks","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setAcl(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"setAcl","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path createSnapshot(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"createSnapshot","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileChecksum getFileChecksum(org.apache.hadoop.fs.Path, long) throws java.io.IOException":{"name":"getFileChecksum","returnType":"org.apache.hadoop.fs.FileChecksu
 m","args":["org.apache.hadoop.fs.Path","long"],"exceptions":["java.io.IOException"]},"void setPermission(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"setPermission","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path startLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"startLocalOutput","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setWriteChecksum(boolean)":{"name":"setWriteChecksum","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.String getScheme()":{"name":"getScheme","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean delete(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"delete","returnType":"
 boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean isDirectory(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"isDirectory","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void copyToLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"copyToLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void copyFromLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"short getDefaultReplication()":{"name":"getDefaultReplication","returnType":"short","args":[],"exceptions":[]},"void setXAttr(org.apache.hadoop.fs.Path, java.lang.Strin
 g, [B) throws java.io.IOException":{"name":"setXAttr","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","[B"],"exceptions":["java.io.IOException"]},"boolean cancelDeleteOnExit(org.apache.hadoop.fs.Path)":{"name":"cancelDeleteOnExit","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void copyToLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyToLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.BlockLocation; getFileBlockLocations(org.apache.hadoop.fs.FileStatus, long, long) throws java.io.IOException":{"name":"getFileBlockLocations","returnType":"[Lorg.apache.hadoop.fs.BlockLocation;","args":["org.apache.hadoop.fs.FileStatus","long","long"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus(org.apache.hadoop.fs.Path, or
 g.apache.hadoop.fs.PathFilter) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.PathFilter"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"void removeDefaultAcl(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"removeDefaultAcl","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem getNamed(java.lang.String, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"getNamed","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.lang.String","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus([Lorg.apache.hadoop.fs.Path;, org.apache.hadoop.fs.PathFilter) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listStatus","returnT
 ype":"[Lorg.apache.hadoop.fs.FileStatus;","args":["[Lorg.apache.hadoop.fs.Path;","org.apache.hadoop.fs.PathFilter"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"long getDefaultBlockSize(org.apache.hadoop.fs.Path)":{"name":"getDefaultBlockSize","returnType":"long","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void concat(org.apache.hadoop.fs.Path, [Lorg.apache.hadoop.fs.Path;) throws java.io.IOException":{"name":"concat","returnType":"void","args":["org.apache.hadoop.fs.Path","[Lorg.apache.hadoop.fs.Path;"],"exceptions":["java.io.IOException"]},"void initialize(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"initialize","returnType":"void","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean, int) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDa
 taOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, short, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","short","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path makeQualified(org.apache.hadoop.fs.Path)":{"name":"makeQualified","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, short) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","short"],"exceptions":["java.io.IOException"]},"void setTimes(org.apache.hadoop.fs.Path, long, long) throws java.io.IOException":{"name":"setTim
 es","returnType":"void","args":["org.apache.hadoop.fs.Path","long","long"],"exceptions":["java.io.IOException"]},"long getUsed() throws java.io.IOException":{"name":"getUsed","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"void moveFromLocalFile([Lorg.apache.hadoop.fs.Path;, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveFromLocalFile","returnType":"void","args":["[Lorg.apache.hadoop.fs.Path;","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.net.URI getDefaultUri(org.apache.hadoop.conf.Configuration)":{"name":"getDefaultUri","returnType":"java.net.URI","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean, int, short, long) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","short","long"],"exceptions":["java.io.IOException"]}
 ,"java.util.Map getXAttrs(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"getXAttrs","returnType":"java.util.Map","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"void setVerifyChecksum(boolean)":{"name":"setVerifyChecksum","returnType":"void","args":["boolean"],"exceptions":[]},"org.apache.hadoop.fs.FileStatus getFileLinkStatus(org.apache.hadoop.fs.Path) throws org.apache.hadoop.security.AccessControlException, org.apache.hadoop.fs.UnsupportedFileSystemException, java.io.IOException, java.io.FileNotFoundException":{"name":"getFileLinkStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["org.apache.hadoop.security.AccessControlException","org.apache.hadoop.fs.UnsupportedFileSystemException","java.io.IOException","java.io.FileNotFoundException"]},"void copyFromLocalFile(boolean, boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.I
 OException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; globStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"globStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getLinkTarget(org.a
 pache.hadoop.fs.Path) throws java.io.IOException":{"name":"getLinkTarget","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem get(java.net.URI, org.apache.hadoop.conf.Configuration, java.lang.String) throws java.lang.InterruptedException, java.io.IOException":{"name":"get","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.net.URI","org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void createSymlink(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path, boolean) throws org.apache.hadoop.fs.FileAlreadyExistsException, org.apache.hadoop.security.AccessControlException, org.apache.hadoop.fs.ParentNotDirectoryException, org.apache.hadoop.fs.UnsupportedFileSystemException, java.io.IOException, java.io.FileNotFoundException":{"name":"createSymlink","returnType":"void","args":["org.apache.hadoop.fs.Path","
 org.apache.hadoop.fs.Path","boolean"],"exceptions":["org.apache.hadoop.fs.FileAlreadyExistsException","org.apache.hadoop.security.AccessControlException","org.apache.hadoop.fs.ParentNotDirectoryException","org.apache.hadoop.fs.UnsupportedFileSystemException","java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path, int) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","int"],"exceptions":["java.io.IOException"]},"boolean delete(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"delete","returnType":"boolean","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void copyFromLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.ap
 ache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.util.List getAllStatistics()":{"name":"getAllStatistics","returnType":"java.util.List","args":[],"exceptions":[]},"void access(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsAction) throws org.apache.hadoop.security.AccessControlException, java.io.IOException, java.io.FileNotFoundException":{"name":"access","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsAction"],"exceptions":["org.apache.hadoop.security.AccessControlException","java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void removeAclEntries(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"removeAclEntri
 es","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void closeAllForUGI(org.apache.hadoop.security.UserGroupInformation) throws java.io.IOException":{"name":"closeAllForUGI","returnType":"void","args":["org.apache.hadoop.security.UserGroupInformation"],"exceptions":["java.io.IOException"]},"void setDefaultUri(org.apache.hadoop.conf.Configuration, java.net.URI)":{"name":"setDefaultUri","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.net.URI"],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean, int, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apach
 e.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path, int, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","int","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"void clearStatistics()":{"name":"clearStatistics","returnType":"void","args":[],"exceptions":[]},"org.apache.hadoop.fs.FileChecksum getFileChecksum(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileChecksum","returnType":"org.apache.hadoop.fs.FileChecksum","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void removeXAttr(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"removeXAttr","returnType":"void","args":["org.
 apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem newInstance(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"newInstance","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getWorkingDirectory()":{"name":"getWorkingDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"org.apache.hadoop.fs.FileSystem get(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"get","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"java.util.List listXAttrs(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listXAttrs","returnType":"java.util.List","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.I
 OException"]},"org.apache.hadoop.fs.FileSystem newInstance(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"newInstance","returnType":"org.apache.hadoop.fs.FileSystem","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void setWorkingDirectory(org.apache.hadoop.fs.Path)":{"name":"setWorkingDirectory","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void copyFromLocalFile(boolean, boolean, [Lorg.apache.hadoop.fs.Path;, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","boolean","[Lorg.apache.hadoop.fs.Path;","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path resolvePath(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"resolvePath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.util.Map getXAttrs(o
 rg.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getXAttrs","returnType":"java.util.Map","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"long getLength(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getLength","returnType":"long","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.lang.String getCanonicalServiceName()":{"name":"getCanonicalServiceName","returnType":"java.lang.String","args":[],"exceptions":[]},"long getBlockSize(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getBlockSize","returnType":"long","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"short getReplication(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getReplication","returnType":"short","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void modifyAclEntries(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"mo
 difyAclEntries","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listLocatedStatus(org.apache.hadoop.fs.Path) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listLocatedStatus","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FSDataInputStream open(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"open","returnType":"org.apache.hadoop.fs.FSDataInputStream","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus(org.apache.hadoop.fs.Path) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException","java.io.FileNotFound
 Exception"]},"org.apache.hadoop.fs.LocalFileSystem newInstanceLocal(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"newInstanceLocal","returnType":"org.apache.hadoop.fs.LocalFileSystem","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"boolean exists(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"exists","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setXAttr(org.apache.hadoop.fs.Path, java.lang.String, [B, java.util.EnumSet) throws java.io.IOException":{"name":"setXAttr","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","[B","java.util.EnumSet"],"exceptions":["java.io.IOException"]},"boolean supportsSymlinks()":{"name":"supportsSymlinks","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataOut
 putStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.util.Progressable, org.apache.hadoop.fs.Options$ChecksumOpt) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable","org.apache.hadoop.fs.Options$ChecksumOpt"],"exceptions":["java.io.IOException"]},"boolean truncate(org.apache.hadoop.fs.Path, long) throws java.io.IOException":{"name":"truncate","returnType":"boolean","args":["org.apache.hadoop.fs.Path","long"],"exceptions":["java.io.IOException"]},"void closeAll() throws java.io.IOException":{"name":"closeAll","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus([Lorg.apache.hadoop.fs.Path;) throws java.io.IOException, java.io
 .FileNotFoundException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["[Lorg.apache.hadoop.fs.Path;"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsServerDefaults getServerDefaults() throws java.io.IOException":{"name":"getServerDefaults","returnType":"org.apache.hadoop.fs.FsServerDefaults","args":[],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions"
 :["java.io.IOException"]}}},"org.apache.hadoop.fs.BlockLocation":{"name":"org.apache.hadoop.fs.BlockLocation","methods":{"[Ljava.lang.String; getCachedHosts()":{"name":"getCachedHosts","returnType":"[Ljava.lang.String;","args":[],"exceptions":[]},"void setTopologyPaths([Ljava.lang.String;) throws java.io.IOException":{"name":"setTopologyPaths","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void setHosts([Ljava.lang.String;) throws java.io.IOException":{"name":"setHosts","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void setCorrupt(boolean)":{"name":"setCorrupt","returnType":"void","args":["boolean"],"exceptions":[]},"[Ljava.lang.String; getNames() throws java.io.IOException":{"name":"getNames","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; getTopologyPaths() throws java.io.IOException":{"name":"getTopologyPaths","returnType":"[Ljava.lang.Str
 ing;","args":[],"exceptions":["java.io.IOException"]},"long getLength()":{"name":"getLength","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"long getOffset()":{"name":"getOffset","returnType":"long","args":[],"exceptions":[]},"void setOffset(long)":{"name":"setOffset","returnType":"void","args":["long"],"exceptions":[]},"void setNames([Ljava.lang.String;) throws java.io.IOException":{"name":"setNames","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void setLength(long)":{"name":"setLength","returnType":"void","args":["long"],"exceptions":[]},"[Ljava.lang.String; getHosts() throws java.io.IOException":{"name":"getHosts","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"boolean isCorrupt()":{"name":"isCorrupt","returnType":"boolean","args":[],"exceptions":[]},"void setCachedHosts([Ljava.lang.String;)":{"
 name":"setCachedHosts","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":[]}}},"org.apache.hadoop.io.Text":{"name":"org.apache.hadoop.io.Text","methods":{"java.lang.String readString(java.io.DataInput, int) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.io.DataInput","int"],"exceptions":["java.io.IOException"]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void set(org.apache.hadoop.io.Text)":{"name":"set","returnType":"void","args":["org.apache.hadoop.io.Text"],"exceptions":[]},"void validateUTF8([B, int, int) throws java.nio.charset.MalformedInputException":{"name":"validateUTF8","returnType":"void","args":["[B","int","int"],"exceptions":["java.nio.charset.MalformedInputException"]},"int getLength()":{"name":"getLength","returnType":"int","args":[],"exceptions":[]},"void readFields(java.io.DataInput, int) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["j
 ava.io.DataInput","int"],"exceptions":["java.io.IOException"]},"void set([B, int, int)":{"name":"set","returnType":"void","args":["[B","int","int"],"exceptions":[]},"int bytesToCodePoint(java.nio.ByteBuffer)":{"name":"bytesToCodePoint","returnType":"int","args":["java.nio.ByteBuffer"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void append([B, int, int)":{"name":"append","returnType":"void","args":["[B","int","int"],"exceptions":[]},"java.lang.String decode([B, int, int, boolean) throws java.nio.charset.CharacterCodingException":{"name":"decode","returnType":"java.lang.String","args":["[B","int","int","boolean"],"exceptions":["java.nio.charset.CharacterCodingException"]},"java.nio.ByteBuffer encode(java.lang.String, boolean) throws java.nio.charset.CharacterCodingException":{"name":"encode","returnType":"java.nio.ByteBuffer","args":["java.lang.String","boolean"],"exceptions":["java.nio.char
 set.CharacterCodingException"]},"int writeString(java.io.DataOutput, java.lang.String, int) throws java.io.IOException":{"name":"writeString","returnType":"int","args":["java.io.DataOutput","java.lang.String","int"],"exceptions":["java.io.IOException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.lang.String decode([B, int, int) throws java.nio.charset.CharacterCodingException":{"name":"decode","returnType":"java.lang.String","args":["[B","int","int"],"exceptions":["java.nio.charset.CharacterCodingException"]},"java.nio.ByteBuffer encode(java.lang.String) throws java.nio.charset.CharacterCodingException":{"name":"encode","returnType":"java.nio.ByteBuffer","args":["java.lang.String"],"exceptions":["java.nio.charset.CharacterCodingException"]},"int writeS
 tring(java.io.DataOutput, java.lang.String) throws java.io.IOException":{"name":"writeString","returnType":"int","args":["java.io.DataOutput","java.lang.String"],"exceptions":["java.io.IOException"]},"[B getBytes()":{"name":"getBytes","returnType":"[B","args":[],"exceptions":[]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]},"void write(java.io.DataOutput, int) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput","int"],"exceptions":["java.io.IOException"]},"void set(java.lang.String)":{"name":"set","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int utf8Length(java.lang.String)":{"name":"utf8Length","returnType":"int","args":["java.lang.String"],"exceptions":[]},"void readWithKnownLength(java.io.DataInput, int) throws java.io.IOException":{"name":"readWithKnownLength","returnType":"void","args":["java.io.DataInput","int"],"exceptions":["java.io.IOException"]},"java.lang.String readString(java.i
 o.DataInput) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"java.lang.String decode([B) throws java.nio.charset.CharacterCodingException":{"name":"decode","returnType":"java.lang.String","args":["[B"],"exceptions":["java.nio.charset.CharacterCodingException"]},"void skip(java.io.DataInput) throws java.io.IOException":{"name":"skip","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"int charAt(int)":{"name":"charAt","returnType":"int","args":["int"],"exceptions":[]},"int find(java.lang.String, int)":{"name":"find","returnType":"int","args":["java.lang.String","int"],"exceptions":[]},"void set([B)":{"name":"set","returnType":"void","args":["[B"],"exceptions":[]},"int find(java.lang.String)":{"name":"find","returnType":"int","args":["java.lang.String"],"exceptions":[]},"[B copyBytes()":{"name":"copyBytes","returnType":"[B","args":[],"exceptions":[]}
 ,"void validateUTF8([B) throws java.nio.charset.MalformedInputException":{"name":"validateUTF8","returnType":"void","args":["[B"],"exceptions":["java.nio.charset.MalformedInputException"]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.Writable":{"name":"org.apache.hadoop.io.Writable","methods":{"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.VLongWritable":{"name":"org.apache.hadoop.io.VLongWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void set(long)":{"name":"set","return
 Type":"void","args":["long"],"exceptions":[]},"long get()":{"name":"get","returnType":"long","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.VLongWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.VLongWritable"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.VersionedWritable
 ":{"name":"org.apache.hadoop.io.VersionedWritable","methods":{"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"byte getVersion()":{"name":"getVersion","returnType":"byte","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.SequenceFile":{"name":"org.apache.hadoop.io.SequenceFile","methods":{"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configu
 ration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileContext, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.io.SequenceFile$Metadata, java.util.EnumSet, [Lorg.apache.hadoop.fs.Options$CreateOpts;) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileContext","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.io.SequenceFile$Metadata","java.util.EnumSet","[Lorg.apache.hadoop.
 fs.Options$CreateOpts;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.conf.Configuration, [Lorg.apache.hadoop.io.SequenceFile$Writer$Option;) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.conf.Configuration","[Lorg.apache.hadoop.io.SequenceFile$Writer$Option;"],"exceptions":["java.io.IOException"]},"void setDefaultCompressionType(org.apache.hadoop.conf.Configuration, org.apache.hadoop.io.SequenceFile$CompressionType)":{"name":"setDefaultCompressionType","returnType":"void","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.io.SequenceFile$CompressionType"],"exceptions":[]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, int, short, long, org.apache.hadoop.io.SequenceFile$Compressi
 onType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.util.Progressable, org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","int","short","long","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.util.Progressable","org.apache.hadoop.io.SequenceFile$Metadata"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FSDataOutputStream, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException":{"name":"createWriter","r
 eturnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.FSDataOutputStream","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.io.SequenceFile$Metadata"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FSDataOutputStream, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.FSDataOutputStream","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec"],"exceptions":["java.io.IOExcepti
 on"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, int, short, long, boolean, org.apache.hadoop.io.Seq
 uenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","int","short","long","boolean","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.io.SequenceFile$Metadata"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.l
 ang.Class","java.lang.Class"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.Compr
 essionCodec, org.apache.hadoop.util.Progressable, org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.util.Progressable","org.apache.hadoop.io.SequenceFile$Metadata"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$CompressionType getDefaultCompressionType(org.apache.hadoop.conf.Configuration)":{"name":"getDefaultCompressionType","returnType":"org.apache.hadoop.io.SequenceFile$CompressionType","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs
 .Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.file.tfile.MetaBlockAlreadyExists":{"name":"org.apache.hadoop.io.file.tfile.MetaBlockAlreadyExists","methods":{}},"org.apache.hadoop.fs.FileStatus":{"name":"org.apache.hadoop.fs.FileStatus","methods":{"org.apache.hadoop.fs.permission.FsPermission getPermission()":{"name":"getPermission","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":[],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"
 exceptions":[]},"boolean isFile()":{"name":"isFile","returnType":"boolean","args":[],"exceptions":[]},"long getBlockSize()":{"name":"getBlockSize","returnType":"long","args":[],"exceptions":[]},"java.lang.String getOwner()":{"name":"getOwner","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getPath()":{"name":"getPath","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void setSymlink(org.apache.hadoop.fs.Path)":{"name":"setSymlink","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"long getAccessTime()":{"name":"getAccessTime","returnType":"long","args":[],"exceptions":[]},"boolean isDir()":{"name":"isDir","returnType":"boolean","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"boolean isEncr
 ypted()":{"name":"isEncrypted","returnType":"boolean","args":[],"exceptions":[]},"long getLen()":{"name":"getLen","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void setPath(org.apache.hadoop.fs.Path)":{"name":"setPath","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.Path getSymlink() throws java.io.IOException":{"name":"getSymlink","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":["java.io.IOException"]},"short getReplication()":{"name":"getReplication","returnType":"short","args":[],"exceptions":[]},"boolean isDirectory()":{"name":"isDirectory","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getGroup()":{"name":"getGroup","returnTy
 pe":"java.lang.String","args":[],"exceptions":[]},"boolean isSymlink()":{"name":"isSymlink","returnType":"boolean","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"long getModificationTime()":{"name":"getModificationTime","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.util.PureJavaCrc32":{"name":"org.apache.hadoop.util.PureJavaCrc32","methods":{"void update([B, int, int)":{"name":"update","returnType":"void","args":["[B","int","int"],"exceptions":[]},"long getValue()":{"name":"getValue","returnType":"long","args":[],"exceptions":[]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"void update(int)":{"name":"update","returnType":"void","args":["int"],"exceptions":[]}}},"org.apache.hadoop.fs.Trash":{"name":"org.apache.hadoop.fs.Trash","methods":{"java.lang.Runnable getEmptier() throws j
 ava.io.IOException":{"name":"getEmptier","returnType":"java.lang.Runnable","args":[],"exceptions":["java.io.IOException"]},"boolean moveToTrash(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveToTrash","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void expunge() throws java.io.IOException":{"name":"expunge","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"boolean moveToAppropriateTrash(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"moveToAppropriateTrash","returnType":"boolean","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.fs.Path","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void checkpoint() throws java.io.IOException":{"name":"checkpoint","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"boolean isEnabled()":{"name":"isEnabled","returnT
 ype":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.record.RecordComparator":{"name":"org.apache.hadoop.record.RecordComparator","methods":{"int compare([B, int, int, [B, int, int)":{"name":"compare","returnType":"int","args":["[B","int","int","[B","int","int"],"exceptions":[]},"void define(java.lang.Class, org.apache.hadoop.record.RecordComparator)":{"name":"define","returnType":"void","args":["java.lang.Class","org.apache.hadoop.record.RecordComparator"],"exceptions":[]}}},"org.apache.hadoop.record.meta.RecordTypeInfo":{"name":"org.apache.hadoop.record.meta.RecordTypeInfo","methods":{"void setName(java.lang.String)":{"name":"setName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.Collection getFieldTypeInfos()":{"name":"getFieldTypeInfos","returnType":"java.util.Collection","args":[],"exceptions":[]},"void serialize(org.apache.hadoop.rec
 ord.RecordOutput, java.lang.String) throws java.io.IOException":{"name":"serialize","returnType":"void","args":["org.apache.hadoop.record.RecordOutput","java.lang.String"],"exceptions":["java.io.IOException"]},"void deserialize(org.apache.hadoop.record.RecordInput, java.lang.String) throws java.io.IOException":{"name":"deserialize","returnType":"void","args":["org.apache.hadoop.record.RecordInput","java.lang.String"],"exceptions":["java.io.IOException"]},"void addField(java.lang.String, org.apache.hadoop.record.meta.TypeID)":{"name":"addField","returnType":"void","args":["java.lang.String","org.apache.hadoop.record.meta.TypeID"],"exceptions":[]},"int compareTo(java.lang.Object) throws java.lang.ClassCastException":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":["java.lang.ClassCastException"]},"org.apache.hadoop.record.meta.RecordTypeInfo getNestedStructTypeInfo(java.lang.String)":{"name":"getNestedStructTypeInfo","returnType":"org.apache.hadoop.reco
 rd.meta.RecordTypeInfo","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.conf.Configuration":{"name":"org.apache.hadoop.conf.Configuration","methods":{"void addResource(org.apache.hadoop.fs.Path)":{"name":"addResource","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"java.util.Set getFinalParameters()":{"name":"getFinalParameters","returnType":"java.util.Set","args":[],"exceptions":[]},"java.lang.String getTrimmed(java.lang.String, java.lang.String)":{"name":"getTrimmed","returnType":"java.lang.String","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void setPattern(java.lang.String, java.util.regex.Pattern)":{"name":"setPattern","returnType":"void","args":["java.lang.String","java.util.regex.Pattern"],"exceptions":[]},"int size()":{"name":"size","returnType":"int","args":[],"exceptions":[]},"void addResource(org.apache.hadoop.conf.Configuration)":{"name":"addResource","returnType":"void","args":["org.apache.hadoop.conf.Conf
 iguration"],"exceptions":[]},"java.util.List getInstances(java.lang.String, java.lang.Class)":{"name":"getInstances","returnType":"java.util.List","args":["java.lang.String","java.lang.Class"],"exceptions":[]},"void addResource(java.net.URL)":{"name":"addResource","returnType":"void","args":["java.net.URL"],"exceptions":[]},"void setFloat(java.lang.String, float)":{"name":"setFloat","returnType":"void","args":["java.lang.String","float"],"exceptions":[]},"void set(java.lang.String, java.lang.String, java.lang.String)":{"name":"set","returnType":"void","args":["java.lang.String","java.lang.String","java.lang.String"],"exceptions":[]},"void setBooleanIfUnset(java.lang.String, boolean)":{"name":"setBooleanIfUnset","returnType":"void","args":["java.lang.String","boolean"],"exceptions":[]},"void reloadConfiguration()":{"name":"reloadConfiguration","returnType":"void","args":[],"exceptions":[]},"java.util.regex.Pattern getPattern(java.lang.String, java.util.regex.Pattern)":{"name":"getPat
 tern","returnType":"java.util.regex.Pattern","args":["java.lang.String","java.util.regex.Pattern"],"exceptions":[]},"java.net.InetSocketAddress updateConnectAddr(java.lang.String, java.net.InetSocketAddress)":{"name":"updateConnectAddr","returnType":"java.net.InetSocketAddress","args":["java.lang.String","java.net.InetSocketAddress"],"exceptions":[]},"java.lang.String get(java.lang.String, java.lang.String)":{"name":"get","returnType":"java.lang.String","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void setDeprecatedProperties()":{"name":"setDeprecatedProperties","returnType":"void","args":[],"exceptions":[]},"boolean onlyKeyExists(java.lang.String)":{"name":"onlyKeyExists","returnType":"boolean","args":["java.lang.String"],"exceptions":[]},"java.util.Iterator iterator()":{"name":"iterator","returnType":"java.util.Iterator","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getLocalPath(java.lang.String, java.lang.String) throws java.io.IOException":{"name":"ge
 tLocalPath","returnType":"org.apache.hadoop.fs.Path","args":["java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.Class getClassByName(java.lang.String) throws java.lang.ClassNotFoundException":{"name":"getClassByName","returnType":"java.lang.Class","args":["java.lang.String"],"exceptions":["java.lang.ClassNotFoundException"]},"java.io.InputStream getConfResourceAsInputStream(java.lang.String)":{"name":"getConfResourceAsInputStream","returnType":"java.io.InputStream","args":["java.lang.String"],"exceptions":[]},"[Ljava.lang.String; getTrimmedStrings(java.lang.String)":{"name":"getTrimmedStrings","returnType":"[Ljava.lang.String;","args":["java.lang.String"],"exceptions":[]},"void writeXml(java.io.Writer) throws java.io.IOException":{"name":"writeXml","returnType":"void","args":["java.io.Writer"],"exceptions":["java.io.IOException"]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]},"java.net.URL getResource(java.lang.Str
 ing)":{"name":"getResource","returnType":"java.net.URL","args":["java.lang.String"],"exceptions":[]},"java.net.InetSocketAddress updateConnectAddr(java.lang.String, java.lang.String, java.lang.String, java.net.InetSocketAddress)":{"name":"updateConnectAddr","returnType":"java.net.InetSocketAddress","args":["java.lang.String","java.lang.String","java.lang.String","java.net.InetSocketAddress"],"exceptions":[]},"boolean getBoolean(java.lang.String, boolean)":{"name":"getBoolean","returnType":"boolean","args":["java.lang.String","boolean"],"exceptions":[]},"void main([Ljava.lang.String;) throws java.lang.Exception":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"java.lang.Enum getEnum(java.lang.String, java.lang.Enum)":{"name":"getEnum","returnType":"java.lang.Enum","args":["java.lang.String","java.lang.Enum"],"exceptions":[]},"void set(java.lang.String, java.lang.String)":{"name":"set","returnType":"void","args":["java.lang.Strin
 g","java.lang.String"],"exceptions":[]},"void setEnum(java.lang.String, java.lang.Enum)":{"name":"setEnum","returnType":"void","args":["java.lang.String","java.lang.Enum"],"exceptions":[]},"void addDeprecation(java.lang.String, java.lang.String)":{"name":"addDeprecation","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":[]},"[Ljava.lang.Class; getClasses(java.lang.String, [Ljava.lang.Class;)":{"name":"getClasses","returnType":"[Ljava.lang.Class;","args":["java.lang.String","[Ljava.lang.Class;"],"exceptions":[]},"float getFloat(java.lang.String, float)":{"name":"getFloat","returnType":"float","args":["java.lang.String","float"],"exceptions":[]},"long getLongBytes(java.lang.String, long)":{"name":"getLongBytes","returnType":"long","args":["java.lang.String","long"],"exceptions":[]},"java.lang.Class getClassByNameOrNull(java.lang.String)":{"name":"getClassByNameOrNull","returnType":"java.lang.Class","args":["java.lang.String"],"exceptions":[]},"void setStr
 ings(java.lang.String, [Ljava.lang.String;)":{"name":"setStrings","returnType":"void","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"void addDeprecations([Lorg.apache.hadoop.conf.Configuration$DeprecationDelta;)":{"name":"addDeprecations","returnType":"void","args":["[Lorg.apache.hadoop.conf.Configuration$DeprecationDelta;"],"exceptions":[]},"[Ljava.lang.String; getPropertySources(java.lang.String)":{"name":"getPropertySources","returnType":"[Ljava.lang.String;","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.conf.Configuration$IntegerRanges getRange(java.lang.String, java.lang.String)":{"name":"getRange","returnType":"org.apache.hadoop.conf.Configuration$IntegerRanges","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void setLong(java.lang.String, long)":{"name":"setLong","returnType":"void","args":["java.lang.String","long"],"exceptions":[]},"void setQuietMode(boolean)":{"name":"setQuietMode","returnType":"void","args":["boolean
 "],"exceptions":[]},"void setClassLoader(java.lang.ClassLoader)":{"name":"setClassLoader","returnType":"void","args":["java.lang.ClassLoader"],"exceptions":[]},"[C getPassword(java.lang.String) throws java.io.IOException":{"name":"getPassword","returnType":"[C","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void setTimeDuration(java.lang.String, long, java.util.concurrent.TimeUnit)":{"name":"setTimeDuration","returnType":"void","args":["java.lang.String","long","java.util.concurrent.TimeUnit"],"exceptions":[]},"void setDouble(java.lang.String, double)":{"name":"setDouble","returnType":"void","args":["java.lang.String","double"],"exceptions":[]},"void addDeprecation(java.lang.String, [Ljava.lang.String;, java.lang.String)":{"name":"addDeprecation","returnType":"void","args":["java.lang.String","[Ljava.lang.String;","java.lang.String"],"exceptions":[]},"java.lang.String get(java.lang.String)":{"name":"get","returnType":"java.lang.String","args":["java.lang.String"]
 ,"exceptions":[]},"java.lang.Class getClass(java.lang.String, java.lang.Class)":{"name":"getClass","returnType":"java.lang.Class","args":["java.lang.String","java.lang.Class"],"exceptions":[]},"void setClass(java.lang.String, java.lang.Class, java.lang.Class)":{"name":"setClass","returnType":"void","args":["java.lang.String","java.lang.Class","java.lang.Class"],"exceptions":[]},"java.util.Collection getStringCollection(java.lang.String)":{"name":"getStringCollection","returnType":"java.util.Collection","args":["java.lang.String"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.io.File getFile(java.lang.String, java.lang.String) throws java.io.IOException":{"name":"getFile","returnType":"java.io.File","args":["java.lang.String","java.lang.St
 ring"],"exceptions":["java.io.IOException"]},"double getDouble(java.lang.String, double)":{"name":"getDouble","returnType":"double","args":["java.lang.String","double"],"exceptions":[]},"void setBoolean(java.lang.String, boolean)":{"name":"setBoolean","returnType":"void","args":["java.lang.String","boolean"],"exceptions":[]},"boolean isDeprecated(java.lang.String)":{"name":"isDeprecated","returnType":"boolean","args":["java.lang.String"],"exceptions":[]},"java.lang.String getTrimmed(java.lang.String)":{"name":"getTrimmed","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":[]},"void setInt(java.lang.String, int)":{"name":"setInt","returnType":"void","args":["java.lang.String","int"],"exceptions":[]},"void addDeprecation(java.lang.String, java.lang.String, java.lang.String)":{"name":"addDeprecation","returnType":"void","args":["java.lang.String","java.lang.String","java.lang.String"],"exceptions":[]},"long getLong(java.lang.String, long)":{"name":"getLong","retur
 nType":"long","args":["java.lang.String","long"],"exceptions":[]},"void addDeprecation(java.lang.String, [Ljava.lang.String;)":{"name":"addDeprecation","returnType":"void","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"void setAllowNullValueProperties(boolean)":{"name":"setAllowNullValueProperties","returnType":"void","args":["boolean"],"exceptions":[]},"java.util.Collection getTrimmedStringCollection(java.lang.String)":{"name":"getTrimmedStringCollection","returnType":"java.util.Collection","args":["java.lang.String"],"exceptions":[]},"[Ljava.lang.String; getStrings(java.lang.String, [Ljava.lang.String;)":{"name":"getStrings","returnType":"[Ljava.lang.String;","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"java.io.Reader getConfResourceAsReader(java.lang.String)":{"name":"getConfResourceAsReader","returnType":"java.io.Reader","args":["java.lang.String"],"exceptions":[]},"long getTimeDuration(java.lang.String, long, java.util.concurrent.Time
 Unit)":{"name":"getTimeDuration","returnType":"long","args":["java.lang.String","long","java.util.concurrent.TimeUnit"],"exceptions":[]},"void addResource(java.io.InputStream)":{"name":"addResource","returnType":"void","args":["java.io.InputStream"],"exceptions":[]},"java.net.InetSocketAddress getSocketAddr(java.lang.String, java.lang.String, java.lang.String, int)":{"name":"getSocketAddr","returnType":"java.net.InetSocketAddress","args":["java.lang.String","java.lang.String","java.lang.String","int"],"exceptions":[]},"void dumpDeprecatedKeys()":{"name":"dumpDeprecatedKeys","returnType":"void","args":[],"exceptions":[]},"[I getInts(java.lang.String)":{"name":"getInts","returnType":"[I","args":["java.lang.String"],"exceptions":[]},"void addResource(java.lang.String)":{"name":"addResource","returnType":"void","args":["java.lang.String"],"exceptions":[]},"[Ljava.lang.String; getTrimmedStrings(java.lang.String, [Ljava.lang.String;)":{"name":"getTrimmedStrings","returnType":"[Ljava.lang.
 String;","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"java.lang.Class getClass(java.lang.String, java.lang.Class, java.lang.Class)":{"name":"getClass","returnType":"java.lang.Class","args":["java.lang.String","java.lang.Class","java.lang.Class"],"exceptions":[]},"void setIfUnset(java.lang.String, java.lang.String)":{"name":"setIfUnset","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void unset(java.lang.String)":{"name":"unset","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void dumpConfiguration(org.apache.hadoop.conf.Configuration, java.io.Writer) throws java.io.IOException":{"name":"dumpConfiguration","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.io.Writer"],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; getStrings(java.lang.String)":{"name":"getStrings","returnType":"[Ljava.lang.String;","args":["java.lang.String"],"exceptions":[]},"void addResource(java.io.Input
 Stream, java.lang.String)":{"name":"addResource","returnType":"void","args":["java.io.InputStream","java.lang.String"],"exceptions":[]},"java.util.Map getValByRegex(java.lang.String)":{"name":"getValByRegex","returnType":"java.util.Map","args":["java.lang.String"],"exceptions":[]},"void setSocketAddr(java.lang.String, java.net.InetSocketAddress)":{"name":"setSocketAddr","returnType":"void","args":["java.lang.String","java.net.InetSocketAddress"],"exceptions":[]},"int getInt(java.lang.String, int)":{"name":"getInt","returnType":"int","args":["java.lang.String","int"],"exceptions":[]},"void writeXml(java.io.OutputStream) throws java.io.IOException":{"name":"writeXml","returnType":"void","args":["java.io.OutputStream"],"exceptions":["java.io.IOException"]},"java.lang.ClassLoader getClassLoader()":{"name":"getClassLoader","returnType":"java.lang.ClassLoader","args":[],"exceptions":[]},"void addDefaultResource(java.lang.String)":{"name":"addDefaultResource","returnType":"void","args":["j
 ava.lang.String"],"exceptions":[]},"java.net.InetSocketAddress getSocketAddr(java.lang.String, java.lang.String, int)":{"name":"getSocketAddr","returnType":"java.net.InetSocketAddress","args":["java.lang.String","java.lang.String","int"],"exceptions":[]},"boolean hasWarnedDeprecation(java.lang.String)":{"name":"hasWarnedDeprecation","returnType":"boolean","args":["java.lang.String"],"exceptions":[]},"java.lang.String getRaw(java.lang.String)":{"name":"getRaw","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.WritableFactories":{"name":"org.apache.hadoop.io.WritableFactories","methods":{"org.apache.hadoop.io.WritableFactory getFactory(java.lang.Class)":{"name":"getFactory","returnType":"org.apache.hadoop.io.WritableFactory","args":["java.lang.Class"],"exceptions":[
 ]},"void setFactory(java.lang.Class, org.apache.hadoop.io.WritableFactory)":{"name":"setFactory","returnType":"void","args":["java.lang.Class","org.apache.hadoop.io.WritableFactory"],"exceptions":[]},"org.apache.hadoop.io.Writable newInstance(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"newInstance","returnType":"org.apache.hadoop.io.Writable","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Writable newInstance(java.lang.Class)":{"name":"newInstance","returnType":"org.apache.hadoop.io.Writable","args":["java.lang.Class"],"exceptions":[]}}},"org.apache.hadoop.io.SetFile":{"name":"org.apache.hadoop.io.SetFile","methods":{}},"org.apache.hadoop.record.compiler.JString":{"name":"org.apache.hadoop.record.compiler.JString","methods":{}},"org.apache.hadoop.record.compiler.JBoolean":{"name":"org.apache.hadoop.record.compiler.JBoolean","methods":{}},"org.apache.hadoop.io.ShortWritable":{"name":"org.apache.hadoop.io.S
 hortWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.ShortWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.ShortWritable"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"short get()":{"name":"get","returnType":"short","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void set(short)":{"name":"set","returnType":"void","args":["short"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":
 {"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.InvalidPathException":{"name":"org.apache.hadoop.fs.InvalidPathException","methods":{}},"org.apache.hadoop.record.compiler.JVector":{"name":"org.apache.hadoop.record.compiler.JVector","methods":{}},"org.apache.hadoop.io.ArrayWritable":{"name":"org.apache.hadoop.io.ArrayWritable","methods":{"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.io.Writable; get()":{"name":"get","returnType":"[Lorg.apache.hadoop.io.Writable;","args":[],"exceptions":[]},"void set([Lorg.apache.hadoop.io.Writable;)":{"name":"set","returnType":"void","args":["[Lorg.apache.hadoop.io.Writable;"],"exceptions":[]},"[Ljava.lang.String; toStrings()":{"name":"toStrings","returnType":"[Ljava.lang.String;","args":[],"exceptions":[]},"java.lang.Class getValu
 eClass()":{"name":"getValueClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"java.lang.Object toArray()":{"name":"toArray","returnType":"java.lang.Object","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.IntWritable":{"name":"org.apache.hadoop.io.IntWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void set(int)":{"name":"set","returnType":"void","args":["int"],"exceptions":[]},"int compareTo(org.apache.hadoop.io.IntWritable)":{"name":"compareTo","returnType":"int","args":["or
 g.apache.hadoop.io.IntWritable"],"exceptions":[]},"int get()":{"name":"get","returnType":"int","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.TwoDArrayWritable":{"name":"org.apache.hadoop.io.TwoDArrayWritable","methods":{"[[Lorg.apache.hadoop.io.Writable; get()":{"name":"get","returnType":"[[Lorg.apache.hadoop.io.Writable;","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void set([[Lorg.apache.hadoop.io.Writable;)":{"name":"set","retur
 nType":"void","args":["[[Lorg.apache.hadoop.io.Writable;"],"exceptions":[]},"java.lang.Object toArray()":{"name":"toArray","returnType":"java.lang.Object","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.FSDataInputStream":{"name":"org.apache.hadoop.fs.FSDataInputStream","methods":{"void readFully(long, [B) throws java.io.IOException":{"name":"readFully","returnType":"void","args":["long","[B"],"exceptions":["java.io.IOException"]},"java.nio.ByteBuffer read(org.apache.hadoop.io.ByteBufferPool, int) throws java.lang.UnsupportedOperationException, java.io.IOException":{"name":"read","returnType":"java.nio.ByteBuffer","args":["org.apache.hadoop.io.ByteBufferPool","int"],"exceptions":["java.lang.UnsupportedOperationException","java.io.IOException"]},"void readFully(long, [B, int, int) throws java.io.IOException":{"n
 ame":"readFully","returnType":"void","args":["long","[B","int","int"],"exceptions":["java.io.IOException"]},"void unbuffer()":{"name":"unbuffer","returnType":"void","args":[],"exceptions":[]},"void seek(long) throws java.io.IOException":{"name":"seek","returnType":"void","args":["long"],"exceptions":["java.io.IOException"]},"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"void setReadahead(java.lang.Long) throws java.lang.UnsupportedOperationException, java.io.IOException":{"name":"setReadahead","returnType":"void","args":["java.lang.Long"],"exceptions":["java.lang.UnsupportedOperationException","java.io.IOException"]},"void releaseBuffer(java.nio.ByteBuffer)":{"name":"releaseBuffer","returnType":"void","args":["java.nio.ByteBuffer"],"exceptions":[]},"java.io.InputStream getWrappedStream()":{"name":"getWrappedStream","returnType":"java.io.InputStream","args":[],"exceptions":[]},"java.nio.ByteBuffer read(
 org.apache.hadoop.io.ByteBufferPool, int, java.util.EnumSet) throws java.lang.UnsupportedOperationException, java.io.IOException":{"name":"read","returnType":"java.nio.ByteBuffer","a

<TRUNCATED>

[23/50] [abbrv] bigtop git commit: Added shell scripts to make the API examiner easier to run, and resource files with expected results for ODPi 2.1.

Posted by rv...@apache.org.
Added shell scripts to make the API examiner easier to run, and resource files with expected results for ODPi 2.1.

(cherry picked from commit b67df8e394192ca9604a8b31a6b4fbbb4bb07425)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/4f19c159
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/4f19c159
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/4f19c159

Branch: refs/heads/master
Commit: 4f19c159fafc67e3475a300ec9323d5eab03c748
Parents: 29eebd0
Author: Alan Gates <al...@gmail.com>
Authored: Wed Mar 8 15:10:19 2017 -0800
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:14 2017 -0700

----------------------------------------------------------------------
 bigtop-tests/spec-tests/runtime/build.gradle    |  2 +-
 .../odpi/specs/runtime/hadoop/ApiExaminer.java  |  8 +--
 .../src/main/resources/api-examiner-checker.sh  | 52 ++++++++++++++++++++
 .../src/main/resources/api-examiner-prep.sh     | 44 +++++++++++++++++
 .../hadoop-common-2.7.3-api-report.json         |  1 +
 .../resources/hadoop-hdfs-2.7.3-api-report.json |  1 +
 ...-mapreduce-client-core-2.7.3-api-report.json |  1 +
 .../hadoop-yarn-api-2.7.3-api-report.json       |  1 +
 .../hadoop-yarn-client-2.7.3-api-report.json    |  1 +
 .../hadoop-yarn-common-2.7.3-api-report.json    |  1 +
 10 files changed, 104 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/4f19c159/bigtop-tests/spec-tests/runtime/build.gradle
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/build.gradle b/bigtop-tests/spec-tests/runtime/build.gradle
index 0eadd96..97e3635 100644
--- a/bigtop-tests/spec-tests/runtime/build.gradle
+++ b/bigtop-tests/spec-tests/runtime/build.gradle
@@ -40,7 +40,7 @@ dependencies {
   testCompile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-common', version: '2.7.2'
   testCompile group: 'org.apache.hadoop', name: 'hadoop-hdfs', version: '2.7.2'
   testCompile group: 'org.apache.hive', name: 'hive-exec', version: '1.2.1'
-    testCompile "junit:junit:4.11"
+  testCompile "junit:junit:4.11"
   if (System.env.HADOOP_CONF_DIR) testRuntime files(System.env.HADOOP_CONF_DIR)
 }
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/4f19c159/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
index c49be13..2ae97a2 100644
--- a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
+++ b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
@@ -51,7 +51,6 @@ public class ApiExaminer {
   private static final Log LOG = LogFactory.getLog(ApiExaminer.class.getName());
 
   static private Set<String> unloadableClasses;
-  //static private List<String> jarsWeCareAbout;
 
   private List<String> errors;
   private List<String> warnings;
@@ -65,17 +64,12 @@ public class ApiExaminer {
 
     /*
     jarsWeCareAbout = new ArrayList<>();
+    jarsWeCareAbout.add("hadoop-common");
     jarsWeCareAbout.add("hadoop-hdfs");
     jarsWeCareAbout.add("hadoop-yarn-common");
     jarsWeCareAbout.add("hadoop-yarn-client");
     jarsWeCareAbout.add("hadoop-yarn-api");
-    jarsWeCareAbout.add("hadoop-mapreduce-client-app");
-    jarsWeCareAbout.add("hadoop-mapreduce-client-common");
     jarsWeCareAbout.add("hadoop-mapreduce-client-core");
-    jarsWeCareAbout.add("hadoop-mapreduce-client-hs");
-    jarsWeCareAbout.add("hadoop-mapreduce-client-hs-plugins");
-    jarsWeCareAbout.add("hadoop-mapreduce-client-jobclient");
-    jarsWeCareAbout.add("hadoop-mapreduce-client-shuffle");
     */
   }
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/4f19c159/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-checker.sh
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-checker.sh b/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-checker.sh
new file mode 100755
index 0000000..1cb9583
--- /dev/null
+++ b/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-checker.sh
@@ -0,0 +1,52 @@
+#!/usr/bin/env bash
+
+############################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# <p>
+# http://www.apache.org/licenses/LICENSE-2.0
+# <p>
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+############################################################################
+
+function usage() {
+    echo "You must set the following variables:  HADOOP_COMMON_HOME HADOOP_COMMON_DIR HADOOP_COMMON_LIB_JARS_DIR "
+    echo "HADOOP_HDFS_HOME HDFS_DIR HDFS_LIB_JARS_DIR HADOOP_YARN_HOME YARN_DIR YARN_LIB_JARS_DIR "
+    echo "HADOOP_MAPRED_HOME MAPRED_DIR MAPRED_LIB_JARS_DIR BIGTOP_HOME (location of bigtop source)"
+    echo "You can get the Hadoop environment variables by using hadoop envvars, hdfs envvars, yarn envvars, and mapred envvars"
+}
+
+for envar in x$HADOOP_COMMON_HOME x$HADOOP_COMMON_DIR x$HADOOP_COMMON_LIB_JARS_DIR x$HADOOP_HDFS_HOME x$HDFS_DIR \
+             x$HDFS_LIB_JARS_DIR x$HADOOP_YARN_HOME x$YARN_DIR x$YARN_LIB_JARS_DIR x$HADOOP_MAPRED_HOME x$MAPRED_DIR \
+             x$MAPRED_LIB_JARS_DIR
+do
+    if [ "${envar}" = "x" ]
+    then
+        usage
+        exit 1
+    fi
+done
+
+
+for dir in $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/build/libs/ $HADOOP_COMMON_HOME/$HADOOP_COMMON_DIR \
+            $HADOOP_COMMON_HOME/$HADOOP_COMMON_LIB_JARS_DIR $HADOOP_HDFS_HOME/$HDFS_DIR \
+            $HADOOP_HDFS_HOME/$HDFS_LIB_JARS_DIR $HADOOP_YARN_HOME/$YARN_DIR $HADOOP_YARN_HOME/$YARN_LIB_JARS_DIR \
+            $HADOOP_MAPRED_HOME/$MAPRED_DIR $HADOOP_MAPRED_HOME/$MAPRED_LIB_JARS_DIR
+do
+    for jar in `find $dir -name \*.jar`
+    do
+        CLASSPATH=$CLASSPATH:$jar
+    done
+done
+
+java -cp $CLASSPATH org.odpi.specs.runtime.hadoop.ApiExaminer $@
+
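
A rough invocation sketch for the checker script above. Every path and value below is a placeholder rather than something taken from this commit; the real values for the HADOOP_*, HDFS_*, YARN_* and MAPRED_* variables come from the 'hadoop envvars', 'hdfs envvars', 'yarn envvars' and 'mapred envvars' commands mentioned in usage(), and the trailing arguments are simply forwarded to ApiExaminer via $@:

  # All paths below are placeholders, not values from this commit.
  export BIGTOP_HOME=/path/to/bigtop-source          # root of the bigtop source checkout
  export HADOOP_COMMON_HOME=/usr/lib/hadoop          # e.g. as reported by 'hadoop envvars'
  export HADOOP_COMMON_DIR=.                         # likewise
  export HADOOP_COMMON_LIB_JARS_DIR=lib              # likewise
  # ... set the HDFS_*, YARN_* and MAPRED_* variables listed in usage() the same way ...
  bash api-examiner-checker.sh <arguments forwarded to ApiExaminer>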

http://git-wip-us.apache.org/repos/asf/bigtop/blob/4f19c159/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh b/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh
new file mode 100755
index 0000000..0bdfe05
--- /dev/null
+++ b/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh
@@ -0,0 +1,44 @@
+#!/usr/bin/env bash
+
+############################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# <p>
+# http://www.apache.org/licenses/LICENSE-2.0
+# <p>
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+############################################################################
+
+if [ "x${APACHE_HADOOP_DIR}" = "x" ]
+then
+    echo "You must set APACHE_HADOOP_DIR to the directory you have placed the Apache Hadoop binary distribution in"
+    exit 1
+fi
+
+if [ "x${BIGTOP_HOME}" = "x" ]
+then
+    echo "You must set BIGTOP_HOME to the root directory for your bigtop source"
+    exit 1
+fi
+
+for jar in `find $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/build/libs/ -name \*.jar`
+do
+    CLASSPATH=$CLASSPATH:$jar
+done
+
+for jar in `find $APACHE_HADOOP_DIR -name \*.jar`
+do
+    CLASSPATH=$CLASSPATH:$jar
+done
+
+java -cp $CLASSPATH org.odpi.specs.runtime.hadoop.ApiExaminer $@
+
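
A similarly hypothetical run of the prep script; both variables are required by the checks above, and the directories shown are placeholders only:

  # Placeholder paths only.
  export APACHE_HADOOP_DIR=/path/to/apache-hadoop-2.7.3   # unpacked Apache Hadoop binary distribution
  export BIGTOP_HOME=/path/to/bigtop-source               # root of the bigtop source checkout
  bash api-examiner-prep.sh <arguments forwarded to ApiExaminer>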


[24/50] [abbrv] bigtop git commit: Include keys for arguments passed to HCatalogMR (cherry picked from commit 1b6c6c94aa05c9c5f3028b4c957d24ab3db33055)

Posted by rv...@apache.org.
Include keys for arguments passed to HCatalogMR
(cherry picked from commit 1b6c6c94aa05c9c5f3028b4c957d24ab3db33055)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/f0e2e03c
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/f0e2e03c
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/f0e2e03c

Branch: refs/heads/master
Commit: f0e2e03c8be698438d618a3056533bfb10c2514d
Parents: dcd9cb2
Author: Raj Desai <rd...@us.ibm.com>
Authored: Thu Jan 19 15:12:12 2017 -0800
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:14 2017 -0700

----------------------------------------------------------------------
 .../src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java  | 4 ++++
 1 file changed, 4 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/f0e2e03c/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
index b51db02..0ea49ce 100644
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
+++ b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
@@ -140,9 +140,13 @@ public class TestHCatalog {
         .addArgument("jar")
         .addArgument(System.getProperty(JOBJAR))
         .addArgument(HCatalogMR.class.getName())
+        .addArgument("-it")
         .addArgument(inputTable)
+        .addArgument("-ot")
         .addArgument(outputTable)
+        .addArgument("-is")
         .addArgument(inputSchema.getSchemaAsTypeString())
+        .addArgument("-os")
         .addArgument(outputSchema.getSchemaAsTypeString()), env);
     LOG.info(results.toString());
     Assert.assertEquals("HCat job failed", 0, Integer.parseInt(results.get("exitValue")));
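
With the four option keys added above, the argument list the test builds corresponds to a command line of roughly this shape (a sketch only: the launcher name, job jar, table names, and schema strings are placeholders inferred from the surrounding test, not literal values from the commit):

  # Approximate shape of the command the test assembles (all values are placeholders).
  hadoop jar <jar named by the JOBJAR system property> <HCatalogMR class name> \
      -it <input table> \
      -ot <output table> \
      -is <input schema type string> \
      -os <output schema type string>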


[42/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-client-core-2.7.3-api-report.json
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-client-core-2.7.3-api-report.json b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-client-core-2.7.3-api-report.json
new file mode 100644
index 0000000..6061c5e
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-client-core-2.7.3-api-report.json
@@ -0,0 +1 @@
+{"name":"hadoop-mapreduce-client-core","version":"2.7.3","classes":{"org.apache.hadoop.mapred.FixedLengthInputFormat":{"name":"org.apache.hadoop.mapred.FixedLengthInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void setRecordLength(org.apache.hadoop.conf.Configuration, int)":{"name":"setRecordLength","returnType":"void","args":["org.apache.hadoop.conf.Configuration","int"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"int getRecordLength(org.apache.hadoop.conf.Configu
 ration)":{"name":"getRecordLength","returnType":"int","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.CombineFileSplit":{"name":"org.apache.hadoop.mapred.lib.CombineFileSplit","methods":{"org.apache.hadoop.mapred.JobConf getJob()":{"name":"getJob","returnType":"org.apache.hadoop.mapred.JobConf","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorMapper":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) t
 hrows java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void map(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(org.apache.hadoop.io.Text, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["org.apache.hadoop.io.Text","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"except
 ions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.HashPartitioner":{"name":"org.apache.hadoop.mapred.lib.HashPartitioner","methods":{"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.OutputFormat":{"name":"org.apache.hadoop.mapreduce.OutputFormat","methods":{"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void checkOutputSpecs(org.apac
 he.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.CounterGroup":{"name":"org.apache.hadoop.mapreduce.CounterGroup","methods":{}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob","methods":{"org.apache.hadoop.mapred.jobcontrol.JobControl createValueAggregatorJobs([Ljava.lang.String;, [Ljava.lang.Clas
 s;) throws java.io.IOException":{"name":"createValueAggregatorJobs","returnType":"org.apache.hadoop.mapred.jobcontrol.JobControl","args":["[Ljava.lang.String;","[Ljava.lang.Class;"],"exceptions":["java.io.IOException"]},"void setAggregatorDescriptors(org.apache.hadoop.mapred.JobConf, [Ljava.lang.Class;)":{"name":"setAggregatorDescriptors","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","[Ljava.lang.Class;"],"exceptions":[]},"org.apache.hadoop.mapred.JobConf createValueAggregatorJob([Ljava.lang.String;, [Ljava.lang.Class;) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapred.JobConf","args":["[Ljava.lang.String;","[Ljava.lang.Class;"],"exceptions":["java.io.IOException"]},"void main([Ljava.lang.String;) throws java.io.IOException":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobConf createValueAggregatorJob([Ljava.lang.String;, [Ljava.la
 ng.Class;, java.lang.Class) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapred.JobConf","args":["[Ljava.lang.String;","[Ljava.lang.Class;","java.lang.Class"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.jobcontrol.JobControl createValueAggregatorJobs([Ljava.lang.String;) throws java.io.IOException":{"name":"createValueAggregatorJobs","returnType":"org.apache.hadoop.mapred.jobcontrol.JobControl","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobConf createValueAggregatorJob([Ljava.lang.String;) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapred.JobConf","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobConf createValueAggregatorJob([Ljava.lang.String;, java.lang.Class) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapred.J
 obConf","args":["[Ljava.lang.String;","java.lang.Class"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.InvalidInputException":{"name":"org.apache.hadoop.mapreduce.lib.input.InvalidInputException","methods":{"java.util.List getProblems()":{"name":"getProblems","returnType":"java.util.List","args":[],"exceptions":[]},"java.lang.String getMessage()":{"name":"getMessage","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.aggregate.UserDefinedValueAggregatorDescriptor":{"name":"org.apache.hadoop.mapred.lib.aggregate.UserDefinedValueAggregatorDescriptor","methods":{"java.lang.Object createInstance(java.lang.String)":{"name":"createInstance","returnType":"java.lang.Object","args":["java.lang.String"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.Counters$Counter":{"name
 ":"org.apache.hadoop.mapred.Counters$Counter","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void setDisplayName(java.lang.String)":{"name":"setDisplayName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"boolean contentEquals(org.apache.hadoop.mapred.Counters$Counter)":{"name":"contentEquals","returnType":"boolean","args":["org.apache.hadoop.mapred.Counters$Counter"],"exceptions":[]},"java.lang.String makeEscapedCompactString()":{"name":"makeEscapedCompactString","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"long getValue()":{"name":"getValue","returnType":"long","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.lang.String getName()":{"name"
 :"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter getUnderlyingCounter()":{"name":"getUnderlyingCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":[],"exceptions":[]},"void increment(long)":{"name":"increment","returnType":"void","args":["long"],"exceptions":[]},"void setValue(long)":{"name":"setValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getDisplayName()":{"name":"getDisplayName","returnType":"java.lang.String","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"long getCounter()":{"name":"getCounter","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.CombineFileRecordReaderWrapper":{"name":"org.apache.hadoop.mapred.lib.CombineFileRecordReaderWrapper","methods":{"long getPos() throws java.io.IOException"
 :{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.LongSumReducer":{"name":"org.apache.hadoop.mapred.lib.LongSumReducer","methods":{"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.had
 oop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.CombineFileSplit":{"name":"org.apache.hadoop.mapreduce.lib.input.CombineFileSplit","methods":{"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"long getLength(int)":{"name":"getLength","returnType":"long","args":["int"],"exceptions":[]},"long getLength()":{"name":"getLength","returnType":"long","args":[],"exceptions":[]},"[Lorg.apache.hadoop.fs.Path; getPaths()":{"name":"getPaths","returnType":"[Lorg.apache.hadoop.fs.Path;","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.
 DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"long getOffset(int)":{"name":"getOffset","returnType":"long","args":["int"],"exceptions":[]},"org.apache.hadoop.fs.Path getPath(int)":{"name":"getPath","returnType":"org.apache.hadoop.fs.Path","args":["int"],"exceptions":[]},"[J getLengths()":{"name":"getLengths","returnType":"[J","args":[],"exceptions":[]},"[J getStartOffsets()":{"name":"getStartOffsets","returnType":"[J","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"int getNumPaths()":{"name":"getNumPaths","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.db.DBConfiguration":{"name":"org.apache.hadoop.mapreduce.lib.db.DBConfiguration","methods":{"java.lang.String getInputQuery()":{"name":"getInputQuery","re
 turnType":"java.lang.String","args":[],"exceptions":[]},"void setInputClass(java.lang.Class)":{"name":"setInputClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setOutputFieldCount(int)":{"name":"setOutputFieldCount","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String getInputTableName()":{"name":"getInputTableName","returnType":"java.lang.String","args":[],"exceptions":[]},"[Ljava.lang.String; getInputFieldNames()":{"name":"getInputFieldNames","returnType":"[Ljava.lang.String;","args":[],"exceptions":[]},"void setOutputTableName(java.lang.String)":{"name":"setOutputTableName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.sql.Connection getConnection() throws java.sql.SQLException, java.lang.ClassNotFoundException":{"name":"getConnection","returnType":"java.sql.Connectio
 n","args":[],"exceptions":["java.sql.SQLException","java.lang.ClassNotFoundException"]},"java.lang.String getInputBoundingQuery()":{"name":"getInputBoundingQuery","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getInputOrderBy()":{"name":"getInputOrderBy","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.Class getInputClass()":{"name":"getInputClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"void setInputTableName(java.lang.String)":{"name":"setInputTableName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setInputCountQuery(java.lang.String)":{"name":"setInputCountQuery","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setInputOrderBy(java.lang.String)":{"name":"setInputOrderBy","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int getOutputFieldCount()":{"name":"getOutputFieldCount","returnType":"int","args":[],"exceptions":[]},"void setInputConditions(ja
 va.lang.String)":{"name":"setInputConditions","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setInputQuery(java.lang.String)":{"name":"setInputQuery","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getInputConditions()":{"name":"getInputConditions","returnType":"java.lang.String","args":[],"exceptions":[]},"void configureDB(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.String, java.lang.String, java.lang.String)":{"name":"configureDB","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String","java.lang.String","java.lang.String","java.lang.String"],"exceptions":[]},"void configureDB(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.String)":{"name":"configureDB","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String","java.lang.String"],"exceptions":[]},"void setInputBoundingQuery(java.lang.String)":{"name":"setInputBoundingQuery","
 returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setInputFieldNames([Ljava.lang.String;)":{"name":"setInputFieldNames","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":[]},"[Ljava.lang.String; getOutputFieldNames()":{"name":"getOutputFieldNames","returnType":"[Ljava.lang.String;","args":[],"exceptions":[]},"java.lang.String getOutputTableName()":{"name":"getOutputTableName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setOutputFieldNames([Ljava.lang.String;)":{"name":"setOutputFieldNames","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":[]},"java.lang.String getInputCountQuery()":{"name":"getInputCountQuery","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.Partitioner":{"name":"org.apache.hadoop.mapred.Partitioner","methods":{"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int
 "],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.output.FilterOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.FilterOutputFormat","methods":{"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOE
 xception":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer":{"name":"org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer","methods":{"void reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJobBase":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJobBase","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],
 "exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader","methods":{"java.lang.Object getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int findSeparator([B, int, int, byte)":{"name":"findSeparator","returnType":"int","args":["[B","int","int","byte"],"exceptions":[]},"java.lang.Class getKeyClass()":{"name":"getKeyClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"org.apache.hadoop.io.Text getCurrentValue()":{"name":"getCurrentValue","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType"
 :"float","args":[],"exceptions":["java.io.IOException"]},"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.Text getCurrentKey()":{"name":"getCurrentKey","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"boolean nextKeyValue() throws java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.io.IOException"]},"void setKeyValue(org.apache.hadoop.io.Text, org.apache.hadoop.io.Text, [B, int, int)":{"name":"setKeyValue","returnType":"void","args":["org.apache.hadoop.io.Text","org.apache.hadoop.io.Text","[B","int","int"],"exceptions":[]},"java.lang.Object getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","
 returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader","methods":{}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFilter":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFilter","methods":{"void setFilterClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setFilterClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":
 ["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.chain.ChainMapper":{"name":"org.apache.hadoop.mapreduce.lib.chain.ChainMapper","methods":{"void run(org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"run","returnType":"void","args":["org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void addMapper(org.apache.hadoop.mapreduce.Job, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"addMapper","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoo
 p.mapred.RecordReader":{"name":"org.apache.hadoop.mapred.RecordReader","methods":{"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorBaseDescriptor":{"name":"org.apache.had
 oop.mapred.lib.aggregate.ValueAggregatorBaseDescriptor","methods":{"org.apache.hadoop.mapred.lib.aggregate.ValueAggregator generateValueAggregator(java.lang.String)":{"name":"generateValueAggregator","returnType":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregator","args":["java.lang.String"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"java.util.Map$Entry generateEntry(java.lang.String, java.lang.String, org.apache.hadoop.io.Text)":{"name":"generateEntry","returnType":"java.util.Map$Entry","args":["java.lang.String","java.lang.String","org.apache.hadoop.io.Text"],"exceptions":[]}}},"org.apache.hadoop.mapred.FileOutputFormat":{"name":"org.apache.hadoop.mapred.FileOutputFormat","methods":{"void setOutputPath(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path)":{"name":"setOutputPath","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","o
 rg.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.Path getTaskOutputPath(org.apache.hadoop.mapred.JobConf, java.lang.String) throws java.io.IOException":{"name":"getTaskOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":["java.io.IOException"]},"void setOutputCompressorClass(org.apache.hadoop.mapred.JobConf, java.lang.Class)":{"name":"setOutputCompressorClass","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class"],"exceptions":[]},"java.lang.Class getOutputCompressorClass(org.apache.hadoop.mapred.JobConf, java.lang.Class)":{"name":"getOutputCompressorClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class"],"exceptions":[]},"void setCompressOutput(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setCompressOutput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"java.lang.Stri
 ng getUniqueName(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"getUniqueName","returnType":"java.lang.String","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"org.apache.hadoop.fs.Path getOutputPath(org.apache.hadoop.mapred.JobConf)":{"name":"getOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void checkOutputSpecs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf) throws org.apache.hadoop.mapred.InvalidJobConfException, java.io.IOException, org.apache.hadoop.mapred.FileAlreadyExistsException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf"],"exceptions":["org.apache.hadoop.mapred.InvalidJobConfException","java.io.IOException","org.apache.hadoop.mapred.FileAlreadyExistsException"]},"org.apache.hadoop.fs.Path getPathForCustomFile(org.apache.hadoop.mapred.JobConf, java.lang.String)"
 :{"name":"getPathForCustomFile","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"void setWorkOutputPath(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path)":{"name":"setWorkOutputPath","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.fs.Path"],"exceptions":[]},"boolean getCompressOutput(org.apache.hadoop.mapred.JobConf)":{"name":"getCompressOutput","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.fs.Path getWorkOutputPath(org.apache.hadoop.mapred.JobConf)":{"name":"getWorkOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","retur
 nType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader","methods":{"void accept(org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader$JoinCollector, org.apache.hadoop.io.WritableComparable) throws java.lang.InterruptedException, java.io.IOException":{"name":"accept","returnType":"void","args":["org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader$JoinCollector","org.apache.hadoop.io.WritableComparable"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int id()":{"name":"id","returnType":"int","args":[],"exceptions":[]},"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException
 , java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void skip(org.apache.hadoop.io.WritableComparable) throws java.lang.InterruptedException, java.io.IOException":{"name":"skip","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int compareTo(org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader"],"exceptions":[]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"org.apache.hadoop.io.WritableComparable key()":{"name":"key","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},
 "float getProgress() throws java.lang.InterruptedException, java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void add(org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader) throws java.lang.In
 terruptedException, java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.WritableComparable getCurrentKey()":{"name":"getCurrentKey","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"java.lang.Object getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void key(org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"key","returnTyp
 e":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.join.JoinRecordReader":{"name":"org.apache.hadoop.mapred.join.JoinRecordReader","methods":{"org.apache.hadoop.mapred.join.TupleWritable createValue()":{"name":"createValue","returnType":"org.apache.hadoop.mapred.join.TupleWritable","args":[],"exceptions":[]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.mapred.join.TupleWritable) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.mapred.join.TupleWritable"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java
 .lang.Object"],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat","methods":{"void setOutputFormatClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setOutputFormatClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void checkOutputSpecs(org.apac
 he.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.join.OuterJoinRecordReader":{"name":"org.apache.hadoop.mapred.join.OuterJoinRecordReader","methods":{}},"org.apache.hadoop.mapred.TextOutputFormat":{"name":"org.apache.hadoop.mapred.TextOutputFormat","methods":{"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf,
  java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.pipes.Submitter":{"name":"org.apache.hadoop.mapred.pipes.Submitter","methods":{"boolean getKeepCommandFile(org.apache.hadoop.mapred.JobConf)":{"name":"getKeepCommandFile","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.RunningJob jobSubmit(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"jobSubmit","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"void setIsJavaMapper(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setIsJavaMapper","returnType":"void","arg
 s":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"void setIsJavaRecordWriter(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setIsJavaRecordWriter","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"void main([Ljava.lang.String;) throws java.lang.Exception":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"boolean getIsJavaRecordWriter(org.apache.hadoop.mapred.JobConf)":{"name":"getIsJavaRecordWriter","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"boolean getIsJavaReducer(org.apache.hadoop.mapred.JobConf)":{"name":"getIsJavaReducer","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.RunningJob submitJob(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"submitJob","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.Job
 Conf"],"exceptions":["java.io.IOException"]},"void setIsJavaRecordReader(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setIsJavaRecordReader","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"int run([Ljava.lang.String;) throws java.lang.Exception":{"name":"run","returnType":"int","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"java.lang.String getExecutable(org.apache.hadoop.mapred.JobConf)":{"name":"getExecutable","returnType":"java.lang.String","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void setKeepCommandFile(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setKeepCommandFile","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"void setIsJavaReducer(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setIsJavaReducer","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"void setExecutable(org.apache.hadoop.m
 apred.JobConf, java.lang.String)":{"name":"setExecutable","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapred.RunningJob runJob(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"runJob","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"boolean getIsJavaMapper(org.apache.hadoop.mapred.JobConf)":{"name":"getIsJavaMapper","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"boolean getIsJavaRecordReader(org.apache.hadoop.mapred.JobConf)":{"name":"getIsJavaRecordReader","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.CombineFileInputFormat":{"name":"org.apache.hadoop.mapred.lib.CombineFileInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache
 .hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.InputSplit; getSplits(org.apache.hadoop.mapred.JobConf, int) throws java.io.IOException":{"name":"getSplits","returnType":"[Lorg.apache.hadoop.mapred.InputSplit;","args":["org.apache.hadoop.mapred.JobConf","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"
 ]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorDescriptor":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorDescriptor","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.ClusterStatus":{"name":"org.apache.hadoop.mapred.ClusterStatus","methods":{"int getTaskTrackers()":{"name":"getTaskTrackers","returnType":"int","args":[],"exceptions":[]},"int getMaxMapTasks()":{"name":"getMaxMapTasks","returnType":"int","args":[],"exceptions":[]},"long getMaxMemory()":{"name":"getMaxMemory","returnType":"long","args":[],"exceptions":[]},"int getMaxReduceTasks()":{"name":"getMaxReduceTasks","returnType":"int","args":[],"exceptions":[]},"java.util.Collection getGraylistedTrackerNames()":{"name":"getGraylistedTrackerNames","returnType":"java.util.Collection","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.Cluster$JobTrackerStatu
 s getJobTrackerStatus()":{"name":"getJobTrackerStatus","returnType":"org.apache.hadoop.mapreduce.Cluster$JobTrackerStatus","args":[],"exceptions":[]},"int getReduceTasks()":{"name":"getReduceTasks","returnType":"int","args":[],"exceptions":[]},"int getGraylistedTrackers()":{"name":"getGraylistedTrackers","returnType":"int","args":[],"exceptions":[]},"long getTTExpiryInterval()":{"name":"getTTExpiryInterval","returnType":"long","args":[],"exceptions":[]},"long getUsedMemory()":{"name":"getUsedMemory","returnType":"long","args":[],"exceptions":[]},"java.util.Collection getActiveTrackerNames()":{"name":"getActiveTrackerNames","returnType":"java.util.Collection","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int getMapTasks()":{"name":"getMapTasks","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobTracker$State getJobTr
 ackerState()":{"name":"getJobTrackerState","returnType":"org.apache.hadoop.mapred.JobTracker$State","args":[],"exceptions":[]},"int getBlacklistedTrackers()":{"name":"getBlacklistedTrackers","returnType":"int","args":[],"exceptions":[]},"java.util.Collection getBlacklistedTrackerNames()":{"name":"getBlacklistedTrackerNames","returnType":"java.util.Collection","args":[],"exceptions":[]},"java.util.Collection getBlackListedTrackersInfo()":{"name":"getBlackListedTrackersInfo","returnType":"java.util.Collection","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"int getNumExcludedNodes()":{"name":"getNumExcludedNodes","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.MapReduceBase":{"name":"org.apache.hadoop.mapred.MapReduceBase","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","retur
 nType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.join.TupleWritable":{"name":"org.apache.hadoop.mapred.join.TupleWritable","methods":{}},"org.apache.hadoop.mapred.ID":{"name":"org.apache.hadoop.mapred.ID","methods":{}},"org.apache.hadoop.mapred.lib.RegexMapper":{"name":"org.apache.hadoop.mapred.lib.RegexMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void map(java.lang.Object, org.apache.hadoop.io.Text, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.
 io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","org.apache.hadoop.io.Text","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregator":{"name":"org.ap
 ache.hadoop.mapred.lib.aggregate.ValueAggregator","methods":{}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorBaseDescriptor":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorBaseDescriptor","methods":{"java.util.ArrayList generateKeyValPairs(java.lang.Object, java.lang.Object)":{"name":"generateKeyValPairs","returnType":"java.util.ArrayList","args":["java.lang.Object","java.lang.Object"],"exceptions":[]},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregator generateValueAggregator(java.lang.String, long)":{"name":"generateValueAggregator","returnType":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregator","args":["java.lang.String","long"],"exceptions":[]},"void configure(org.apache.hadoop.conf.Configuration)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"java.util.Map$Entry generateEntry(java.lang.String, java.lang.String, org.apache.hadoop.io.Text)":{"name":"generateEntry","returnTy
 pe":"java.util.Map$Entry","args":["java.lang.String","java.lang.String","org.apache.hadoop.io.Text"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.DoubleValueSum":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.DoubleValueSum","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"void addNextValue(double)":{"name":"addNextValue","returnType":"void","args":["double"],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"double getSum()":{"name":"getSum","returnType":"double","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.LongValueMin":{"name":"org.apache.hadoop.mapreduce.lib.aggr
 egate.LongValueMin","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"long getVal()":{"name":"getVal","returnType":"long","args":[],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"void addNextValue(long)":{"name":"addNextValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.InputSplit":{"name":"org.apache.hadoop.mapreduce.InputSplit","methods":{"[Lorg.apache.hadoop.mapred.SplitLocationInfo; getLocationInfo() throws java.io.IOException":{"name":"getLocationInfo","returnType":"[Lorg.apache.hadoop.mapred.SplitLocationInfo;","args":[],"exceptions":["java.io.IOException"]},"[Ljava
 .lang.String; getLocations() throws java.lang.InterruptedException, java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"long getLength() throws java.lang.InterruptedException, java.io.IOException":{"name":"getLength","returnType":"long","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.TotalOrderPartitioner":{"name":"org.apache.hadoop.mapred.lib.TotalOrderPartitioner","methods":{"void setPartitionFile(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path)":{"name":"setPartitionFile","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.fs.Path"],"exceptions":[]},"java.lang.String getPartitionFile(org.apache.hadoop.mapred.JobConf)":{"name":"getPartitionFile","returnType":"java.lang.String","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void configure(org.apache.hadoo
 p.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.Counter":{"name":"org.apache.hadoop.mapreduce.Counter","methods":{"void setDisplayName(java.lang.String)":{"name":"setDisplayName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"long getValue()":{"name":"getValue","returnType":"long","args":[],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter getUnderlyingCounter()":{"name":"getUnderlyingCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":[],"exceptions":[]},"void increment(long)":{"name":"increment","returnType":"void","args":["long"],"exceptions":[]},"void setValue(long)":{"name":"setValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getDisplayName()":{"name":"getDisplayName","returnType":"java.lang.String","args":
 [],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.db.DBInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.db.DBInputFormat","methods":{"org.apache.hadoop.mapreduce.lib.db.DBConfiguration getDBConf()":{"name":"getDBConf","returnType":"org.apache.hadoop.mapreduce.lib.db.DBConfiguration","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setC
 onf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"java.sql.Connection createConnection()":{"name":"createConnection","returnType":"java.sql.Connection","args":[],"exceptions":[]},"void setInput(org.apache.hadoop.mapreduce.Job, java.lang.Class, java.lang.String, java.lang.String, java.lang.String, [Ljava.lang.String;)":{"name":"setInput","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class","java.lang.String","java.lang.String","java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"java.sql.Connection getConnection()":{"name":"getConnection","returnType":"java.sql.Connection","args":[],"exceptions":[]},"java.lang.String getDBProductName()":{"name":"getDBProductName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setInput(org.apache.hadoop.mapreduce.Job, java.lang.Class, java.lang.String, java.lang.String)":{"name":"setInput","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lan
 g.Class","java.lang.String","java.lang.String"],"exceptions":[]},"java.util.List getSplits(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"getSplits","returnType":"java.util.List","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.StreamBackedIterator":{"name":"org.apache.hadoop.mapreduce.lib.join.StreamBackedIterator","methods":{"void add(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean next(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"boolean replay(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"replay","returnType":"boo
 lean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.join.CompositeInputFormat":{"name":"org.apache.hadoop.mapred.join.CompositeInputFormat","methods":{"org.apache.hadoop.mapred.join.ComposableRecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.join.ComposableRecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"java.lang.String compose(java.lang.Str
 ing, java.lang.Class, [Lorg.apache.hadoop.fs.Path;)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.String","java.lang.Class","[Lorg.apache.hadoop.fs.Path;"],"exceptions":[]},"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void setFormat(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"setFormat","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"java.lang.String compose(java.lang.Class, java.lang.String)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.Class","java.lang.String"],"exceptions":[]},"
 java.lang.String compose(java.lang.String, java.lang.Class, [Ljava.lang.String;)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.String","java.lang.Class","[Ljava.lang.String;"],"exceptions":[]},"[Lorg.apache.hadoop.mapred.InputSplit; getSplits(org.apache.hadoop.mapred.JobConf, int) throws java.io.IOException":{"name":"getSplits","returnType":"[Lorg.apache.hadoop.mapred.InputSplit;","args":["org.apache.hadoop.mapred.JobConf","int"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.Partitioner":{"name":"org.apache.hadoop.mapreduce.Partitioner","methods":{"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.output.NullOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.NullOutputFormat","methods":{"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext)":{"name":"checkOutputSpe
 cs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext)":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":[]},"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext)":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer":{"name":"org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer","methods":{"void reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.lang.Iter
 able","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.TaskCompletionEvent$Status":{"name":"org.apache.hadoop.mapred.TaskCompletionEvent$Status","methods":{"[Lorg.apache.hadoop.mapred.TaskCompletionEvent$Status; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.mapred.TaskCompletionEvent$Status;","args":[],"exceptions":[]},"org.apache.hadoop.mapred.TaskCompletionEvent$Status valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.mapred.TaskCompletionEvent$Status","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.mapred.JobContext":{"name":"org.apache.hadoop.mapred.JobContext","methods":{"org.apache.hadoop.util.Progressable getProgressible()":{"name":"getProgressible","returnType":"org.apache.hadoop.util.Progressable","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobConf getJobConf()":{"name":"getJobConf","returnType":"org.apache.had
 oop.mapred.JobConf","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.OutputCommitter":{"name":"org.apache.hadoop.mapreduce.OutputCommitter","methods":{"boolean isCommitJobRepeatable(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"isCommitJobRepeatable","returnType":"boolean","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void commitJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"commitJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void abortTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"abortTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"isRecoverySupported","returnType":"boolean","args
 ":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"boolean needsTaskCommit(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"needsTaskCommit","returnType":"boolean","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void setupJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"setupJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void recoverTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"recoverTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void cleanupJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"cleanupJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void abortJ
 ob(org.apache.hadoop.mapreduce.JobContext, org.apache.hadoop.mapreduce.JobStatus$State) throws java.io.IOException":{"name":"abortJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext","org.apache.hadoop.mapreduce.JobStatus$State"],"exceptions":["java.io.IOException"]},"void setupTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"setupTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void commitTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"commitTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported()":{"name":"isRecoverySupported","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueHistogram":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueHistogram","methods":{}},"o
 rg.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.CombineSequenceFileInputFormat":{"name":"org.apache.hadoop.mapred.lib.CombineSequenceFileInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputS
 plit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.InputSplit":{"name":"org.apache.hadoop.mapred.InputSplit","methods":{"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"long getLength() throws java.io.IOException":{"name":"getLength","returnType":"long","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.aggregate.LongValueSum":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.LongValueSum","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"void addNextValue(long)":{"name":"addNextValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getReport()":{"
 name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]},"long getSum()":{"name":"getSum","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileRecordReader","methods":{"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentValue()":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"float getProgress() throws java.io.IOException":{"name":"getProg
 ress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentKey()":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.MultipleTextOutputFormat":{"name":"org.apache.hadoop.mapred.lib.MultipleTextOutputFormat","methods":{}},"org.apache.hadoop.mapreduce.lib.aggregate.StringValueMin":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.StringValueMin","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"java.lang.String getVal()":{"name":"getVal","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.A
 rrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.OverrideRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.OverrideRecordReader","methods":{"org.apache.hadoop.io.Writable createValue()":{"name":"createValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorJob":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorJob","methods":{"org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl createValueAggregatorJobs([Ljava.lang.String;) throws java.io.IOException":{"name":"createValueAggregatorJobs","returnType":"org.apache.hadoop.mapreduce.lib.j
 obcontrol.JobControl","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl createValueAggregatorJobs([Ljava.lang.String;, [Ljava.lang.Class;) throws java.io.IOException":{"name":"createValueAggregatorJobs","returnType":"org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl","args":["[Ljava.lang.String;","[Ljava.lang.Class;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.conf.Configuration setAggregatorDescriptors([Ljava.lang.Class;)":{"name":"setAggregatorDescriptors","returnType":"org.apache.hadoop.conf.Configuration","args":["[Ljava.lang.Class;"],"exceptions":[]},"void main([Ljava.lang.String;) throws java.lang.InterruptedException, java.io.IOException, java.lang.ClassNotFoundException":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.InterruptedException","java.io.IOException","java.lang.ClassNotFoundException"]},"org.apache.hadoop.mapreduce.Job createValueA
 ggregatorJob([Ljava.lang.String;, [Ljava.lang.Class;) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapreduce.Job","args":["[Ljava.lang.String;","[Ljava.lang.Class;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.Job createValueAggregatorJob(org.apache.hadoop.conf.Configuration, [Ljava.lang.String;) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapreduce.Job","args":["org.apache.hadoop.conf.Configuration","[Ljava.lang.String;"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.LongValueMin":{"name":"org.apache.hadoop.mapred.lib.aggregate.LongValueMin","methods":{}},"org.apache.hadoop.mapred.lib.aggregate.LongValueSum":{"name":"org.apache.hadoop.mapred.lib.aggregate.LongValueSum","methods":{}},"org.apache.hadoop.mapred.JobID":{"name":"org.apache.hadoop.mapred.JobID","methods":{"java.lang.String getJobIDsPattern(java.lang.String, java.lan
 g.Integer)":{"name":"getJobIDsPattern","returnType":"java.lang.String","args":["java.lang.String","java.lang.Integer"],"exceptions":[]},"org.apache.hadoop.mapred.JobID forName(java.lang.String) throws java.lang.IllegalArgumentException":{"name":"forName","returnType":"org.apache.hadoop.mapred.JobID","args":["java.lang.String"],"exceptions":["java.lang.IllegalArgumentException"]},"org.apache.hadoop.mapred.JobID read(java.io.DataInput) throws java.io.IOException":{"name":"read","returnType":"org.apache.hadoop.mapred.JobID","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobID downgrade(org.apache.hadoop.mapreduce.JobID)":{"name":"downgrade","returnType":"org.apache.hadoop.mapred.JobID","args":["org.apache.hadoop.mapreduce.JobID"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.FileSplit":{"name":"org.apache.hadoop.mapreduce.lib.input.FileSplit","methods":{"long getStart()":{"name":"getStart","returnType":"long","args":[],"exceptio
 ns":[]},"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.SplitLocationInfo; getLocationInfo() throws java.io.IOException":{"name":"getLocationInfo","returnType":"[Lorg.apache.hadoop.mapred.SplitLocationInfo;","args":[],"exceptions":["java.io.IOException"]},"long getLength()":{"name":"getLength","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getPath()":{"name":"getPath","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","arg
 s":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionReducer":{"name":"org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionReducer","methods":{"void reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void reduce(org.apache.hadoop.io.Text, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["org.apache.hadoop.io.Text","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setup(org.apache.hadoop.mapreduce.Reducer
 $Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"setup","returnType":"void","args":["org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.StringValueMax":{"name":"org.apache.hadoop.mapred.lib.aggregate.StringValueMax","methods":{}},"org.apache.hadoop.mapreduce.lib.output.SequenceFileAsBinaryOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.SequenceFileAsBinaryOutputFormat","methods":{"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void setSequenceFileOutputKeyClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setSequenceFileOutputKeyClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"org.apache
 .hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"java.lang.Class getSequenceFileOutputValueClass(org.apache.hadoop.mapreduce.JobContext)":{"name":"getSequenceFileOutputValueClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void setSequenceFileOutputValueClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setSequenceFileOutputValueClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"java.lang.Class getSequenceFileOutputKeyClass(org.apache.hadoop.mapreduce.JobContext)":{"name":"getSequenceFileOutputKeyClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]}}},"org.apac
 he.hadoop.mapred.Reducer":{"name":"org.apache.hadoop.mapred.Reducer","methods":{"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.jobcontrol.JobControl":{"name":"org.apache.hadoop.mapred.jobcontrol.JobControl","methods":{"java.util.ArrayList getReadyJobs()":{"name":"getReadyJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.util.ArrayList getFailedJobs()":{"name":"getFailedJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.util.ArrayList getSuccessfulJobs()":{"name":"getSuccessfulJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.util.ArrayList getWaitingJobs()":{"name":"getWaitingJob
 s","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.util.ArrayList getRunningJobs()":{"name":"getRunningJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"void addJobs(java.util.Collection)":{"name":"addJobs","returnType":"void","args":["java.util.Collection"],"exceptions":[]},"int getState()":{"name":"getState","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.ResetableIterator":{"name":"org.apache.hadoop.mapreduce.lib.join.ResetableIterator","methods":{"void add(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean next(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"boolean replay
 (org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"replay","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorMapper":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void map(org.apache.hadoo
 p.io.WritableComparable, org.apache.hadoop.io.Writable, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"map","returnType":"void","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setup(org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"setup","returnType":"void","args":["org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.join.CompositeInputSplit":{"name":"org.apache.hadoop.mapred.join.CompositeInputSplit","methods":{"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; getLocation
 (int) throws java.io.IOException":{"name":"getLocation","returnType":"[Ljava.lang.String;","args":["int"],"exceptions":["java.io.IOException"]},"void add(org.apache.hadoop.mapred.InputSplit) throws java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.mapred.InputSplit"],"exceptions":["java.io.IOException"]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"long getLength() throws java.io.IOException":{"name":"getLength","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.InputSplit get(int)":{"name":"get","returnType":"org.apache.hadoop.mapred.InputSplit","args":["int"],"exceptions":[]},"long getLength(int) throws java.io.IOException":{"name":"getLength","returnType":"long","args":["int"],"exceptions":["java.io.IOException"]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"re
 adFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.BinaryPartitioner":{"name":"org.apache.hadoop.mapred.lib.BinaryPartitioner","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.partition.KeyFieldBasedComparator":{"name":"org.apache.hadoop.mapreduce.lib.partition.KeyFieldBasedComparator","methods":{"void setKeyFieldComparatorOptions(org.apache.hadoop.mapreduce.Job, java.lang.String)":{"name":"setKeyFieldComparatorOptions","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.String"],"exceptions":[]},"int compare([B, int, int, [B, int, int)":{"name":"compare","returnType":"int","args":["[B","int","int","[B","int","int"],"exceptions":[]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Conf
 iguration","args":[],"exceptions":[]},"java.lang.String getKeyFieldComparatorOption(org.apache.hadoop.mapreduce.JobContext)":{"name":"getKeyFieldComparatorOption","returnType":"java.lang.String","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoo
 p.mapred.MultiFileSplit":{"name":"org.apache.hadoop.mapred.MultiFileSplit","methods":{"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.JobQueueInfo":{"name":"org.apache.hadoop.mapred.JobQueueInfo","methods":{"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setJobStatuses([Lorg.apache.hadoop.mapreduce.JobStatus;)":{"name":"setJobStatuses","returnType":"void","args":["[Lorg.apache.hadoop.mapreduce.JobStatus;"],"exceptions":[]},"void setChildren(java.util.List)":{"name":"setChildren","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.List getChildren()":{"name":"getChildren","returnType":"java.util.List","args":[],"exceptions":[]
 },"void setQueueState(java.lang.String)":{"name":"setQueueState","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getQueueState()":{"name":"getQueueState","returnType":"java.lang.String","args":[],"exceptions":[]},"void setSchedulingInfo(java.lang.String)":{"name":"setSchedulingInfo","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setProperties(java.util.Properties)":{"name":"setProperties","returnType":"void","args":["java.util.Properties"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.db.DBOutputFormat":{"name":"org.apache.hadoop.mapred.lib.db.DBOutputFormat","methods":{"void setOutput(org.apache.hadoop.mapred.JobConf, java.lang.String, [Ljava.lang.String;)":{"name":"setOutput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"void checkOutputSpecs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":
 "checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"void setOutput(org.apache.hadoop.mapred.JobConf, java.lang.String, int)":{"name":"setOutput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String","int"],"exceptions":[]},"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.RecordWriter":{"name":"org.apache.hadoop.mapred.RecordWriter","methods":{"void close(org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"
 close","returnType":"void","args":["org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void write(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.FileAlreadyExistsException":{"name":"org.apache.hadoop.mapred.FileAlreadyExistsException","methods":{}},"org.apache.hadoop.mapreduce.lib.join.JoinRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.JoinRecordReader","methods":{"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.lib.join.TupleWritable createValue()":{"name":"createValue","returnType":"org.apache.hadoop.mapreduce.lib.join.TupleWritable","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable cre
 ateValue()":{"name":"createValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.TupleWritable":{"name":"org.apache.hadoop.mapreduce.lib.join.TupleWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int size()":{"name":"size","returnType":"int","args":[],"exceptions":[]},"java.util.Iterator iterator()":{"name":"iterator","returnType":"java.util.Iterator","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable get(int)":{"name":"get","returnType":"org.apache.hadoop.io.Writable","args":["int"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lan
 g.Object"],"exceptions":[]},"boolean has(int)":{"name":"has","returnType":"boolean","args":["int"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.CombineTextInputFormat":{"name":"org.apache.hadoop.mapred.lib.CombineTextInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.IdentityReducer":{"name":"org.apache.hadoop.mapred.lib.IdentityReducer","methods":{"void reduce(java.lang.Object, java.util.
 Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.TaskID":{"name":"org.apache.hadoop.mapreduce.TaskID","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.mapreduce.ID)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapreduce.ID"],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskType getTaskType(char)":{"name":"getTaskType","returnType":"org.apache.hadoop.mapreduce.TaskType","args":["char"],"exceptions":[]},"boolean isMap()":{"name":"isMap","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getAllTaskTypes()":{"name":"getAllTaskTypes","returnType":"java.lang.String","args":[],"ex
 ceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"char getRepresentingCharacter(org.apache.hadoop.mapreduce.TaskType)":{"name":"getRepresentingCharacter","returnType":"char","args":["org.apache.hadoop.mapreduce.TaskType"],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskID forName(java.lang.String) throws java.lang.IllegalArgumentException":{"name":"forName","returnType":"org.apache.hadoop.mapreduce.TaskID","args":["java.lang.String"],"exceptions":["java.lang.IllegalArgumentException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.
 JobID getJobID()":{"name":"getJobID","returnType":"org.apache.hadoop.mapreduce.JobID","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskType getTaskType()":{"name":"getTaskType","returnType":"org.apache.hadoop.mapreduce.TaskType","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.filecache.DistributedCache":{"name":"org.apache.hadoop.filecache.DistributedCache","methods":{"void setLocalArchives(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"setLocalArchives","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"long getTimestamp(org.apache.hadoop.conf.Configuration, java.net.URI) throws java.io.IOException":{"name":"getTimestamp","returnType":"long","args":["org.apache.hadoop.conf.Configuration","java.net.URI"],"exceptions":["java.io.IOEx
 ception"]},"void setFileTimestamps(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name

<TRUNCATED>

[10/50] [abbrv] bigtop git commit: changed url username and password strings (cherry picked from commit ef77cfb55129f1ec2f1c96e136405be2a53bd45b)

Posted by rv...@apache.org.
changed url username and password strings
(cherry picked from commit ef77cfb55129f1ec2f1c96e136405be2a53bd45b)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/241c8397
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/241c8397
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/241c8397

Branch: refs/heads/master
Commit: 241c8397215fe42e9e0b3881814b98071065325d
Parents: 55ef768
Author: roypradeep <ro...@us.ibm.com>
Authored: Wed Nov 2 11:40:36 2016 -0700
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:11 2017 -0700

----------------------------------------------------------------------
 .../test/java/org/odpi/specs/runtime/hive/TestBeeline.java   | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/241c8397/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
index 682949d..578621a 100644
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
+++ b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
@@ -31,9 +31,11 @@ import java.util.Map;
 public class TestBeeline {
 	
 	public static final Log LOG = LogFactory.getLog(TestBeeline.class.getName());
-	private static final String URL = "odpiHiveTestJdbcUrl";
-	private static final String USER = "odpiHiveTestJdbcUser";
-	private static final String PASSWD = "odpiHiveTestJdbcPassword";
+	
+	private static final String URL = "odpi.test.hive.jdbc.url";
+	private static final String USER = "odpi.test.hive.jdbc.user";
+	private static final String PASSWD = "odpi.test.hive.jdbc.password";
+	
 	private static Map<String, String> results;
 	private static String beelineUrl; 
 	private static String beelineUser;
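
For reference, the three renamed keys are ordinary JVM system properties, so a test run is expected to supply them with -D flags (or an equivalent Gradle test systemProperty). The following is a minimal, self-contained Java sketch of reading them; the class name BeelinePropsSketch, the fallback defaults, and the printed command are illustrative assumptions, not part of this commit.

// Hypothetical sketch only: reads the dotted property names introduced above.
// Example invocation (values are placeholders):
//   java -Dodpi.test.hive.jdbc.url=jdbc:hive2://host:10000 \
//        -Dodpi.test.hive.jdbc.user=hive -Dodpi.test.hive.jdbc.password=hive BeelinePropsSketch
public class BeelinePropsSketch {
  private static final String URL = "odpi.test.hive.jdbc.url";
  private static final String USER = "odpi.test.hive.jdbc.user";
  private static final String PASSWD = "odpi.test.hive.jdbc.password";

  public static void main(String[] args) {
    String url = System.getProperty(URL);
    if (url == null) {
      throw new RuntimeException("You must set the property " + URL + " with the JDBC URL");
    }
    // User and password are optional; -n/-p are only added when they are present.
    String user = System.getProperty(USER, "");
    String passwd = System.getProperty(PASSWD, "");
    StringBuilder cmd = new StringBuilder("beeline -u ").append(url);
    if (!user.isEmpty()) cmd.append(" -n ").append(user);
    if (!passwd.isEmpty()) cmd.append(" -p ").append(passwd);
    System.out.println(cmd);
  }
}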


[38/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy b/bigtop-tests/spec-tests/runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
deleted file mode 100644
index bc2a3b2..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/groovy/org/odpi/specs/runtime/TestSpecsRuntime.groovy
+++ /dev/null
@@ -1,275 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.odpi.specs.runtime
-
-import groovy.io.FileType
-import org.junit.Assert
-import org.apache.bigtop.itest.shell.*
-import org.junit.Test
-import org.junit.runner.RunWith
-import org.junit.runners.Parameterized
-import org.junit.runners.Parameterized.Parameters
-
-import java.util.regex.Matcher
-import java.util.regex.Pattern
-
-/**
- * Check all expected environment
- * Tests are constructed dynamically, using external DSL to define
- * - test name
- * - test type
- * - command to execute the test
- * - expected pattern of the output
- */
-@RunWith(Parameterized.class)
-public class TestSpecsRuntime {
-  private String testName
-  private String type
-  private Map arguments
-
-  private static ENV = System.getenv()
-
-  @Parameters(name="{0}")
-  public static Collection<Object[]> allTests() {
-    List<Object[]> specs = [];
-
-    config.specs.tests.each { test ->
-      specs.add([test.value.name, test.value.type, test.value.arguments] as Object[])
-    }
-    return specs
-  }
-
-  public TestSpecsRuntime (String testName, String type, Map arguments) {
-    this.testName = testName
-    this.type = type
-    this.arguments = arguments
-  }
-
-  public static final String testsList = System.properties['test.resources.dir'] ?:
-      "${System.properties['buildDir']}/resources/test"
-  def final static config = new ConfigSlurper().parse(new URL("file:${getTestConfigName()}"))
-
-  private static String getTestConfigName() {
-    return "$testsList/testRuntimeSpecConf.groovy";
-  }
-
-  private Map getEnvMap(String command) {
-    def envMap = [:]
-    Shell sh = new Shell()
-    def envvars = sh.exec(command).getOut()
-    if (sh.getRet() == 0) {
-      envvars.each {
-        def match = it =~ /(?<variable>[^=]+)='(?<value>[^']+)'$/
-        if ( match.matches() ) {
-          envMap[match.group('variable')] = match.group('value')
-        }
-      }
-    }
-    return envMap
-  }
-
-  private String getEnv(String name, String cmd) {
-    String value = ENV[name]
-    if (value == null) {
-       value = getEnvMap(cmd)[name]
-    }
-    return value
-  }
-
-  @Test
-  public void testAll() {
-    switch (type) {
-      case 'shell':
-        Shell sh = new Shell()
-        def output = sh.exec(arguments['command']).getOut().join("\n")
-        int actualResult = sh.getRet()
-        int expectedResult = arguments['expectedResult'] ? arguments['expectedResult'] : 0 // use 0 as default success code
-        Assert.assertTrue("${testName} fail: ${arguments['message']} - '${arguments['command']}' returned ${actualResult} instead of ${expectedResult}",
-            actualResult == expectedResult)
-        break
-
-      case 'envdir':
-        def var = arguments['variable']
-        def isPathRelative = arguments['relative']
-        def pathString = getEnv(var, arguments['envcmd'])
-        Assert.assertTrue("${testName} fail: environment variable ${var} does not exist", pathString != null )
-
-        if ( arguments['pattern'] ) {
-            Assert.assertTrue("${testName} fail: $pathString doesn't contain expected pattern",
-                pathString ==~ /${arguments['pattern']}/)
-        }
-
-        def pathFile = new File(pathString)
-        if ( isPathRelative ) {
-            Assert.assertFalse("${testName} fail: ${pathString} is not relative", pathFile.isAbsolute() )
-        } else {
-            if (!arguments['donotcheckexistance']) {
-              Assert.assertTrue("${testName} fail: ${pathString} does not exist", pathFile.exists() )
-              Assert.assertTrue("${testName} fail: ${pathString} is not directory", pathFile.isDirectory() )
-            }
-        }
-        break
-
-      case 'dirstruct':
-        def expectedFiles = []
-        new File("${testsList}", "${arguments['referenceList']}").eachLine { line ->
-           expectedFiles << ~line
-        }
-        def baseDirEnv = getEnv(arguments['baseDirEnv'], arguments['envcmd'])
-        Assert.assertNotNull("${baseDirEnv} has to be set for the test to continue",
-          baseDirEnv)
-        def root = new File(baseDirEnv)
-        def actualFiles = []
-        def missingFiles = []
-        if ( ! root.exists() ) {
-          Assert.fail("${testName} fail: ${baseDirEnv} does not exist!");
-        }
-
-        root.eachFileRecurse(FileType.ANY) { file ->
-          def relPath = new File( root.toURI().relativize( file.toURI() ).toString() ).path
-          actualFiles << relPath
-        }
-
-        expectedFiles.each { wantFile ->
-          def ok = false
-          for (def x : actualFiles) {
-            if (x =~ wantFile) {
-              ok = true
-              break
-            }
-          }
-          if (!ok) {
-            missingFiles << wantFile
-          }
-        }
-
-        Assert.assertTrue("${testName} fail: Directory structure for ${baseDirEnv} does not match reference. Missing files: ${missingFiles} ",
-          missingFiles.size() == 0)
-        break
-
-      case 'dircontent':
-        def expectedFiles = []
-        new File("${testsList}", "${arguments['referenceList']}").eachLine { line ->
-          expectedFiles << ~line
-        }
-
-        def baseDir = getEnv(arguments['baseDirEnv'], arguments['envcmd'])
-        def subDir = arguments['subDir']
-        if (!subDir && arguments['subDirEnv']) {
-          subDir = getEnv(arguments['subDirEnv'], arguments['envcmd'])
-        }
-
-        def dir = null
-        if (subDir) {
-          dir = new File(baseDir, subDir)
-        } else {
-          dir = new File(baseDir)
-        }
-        Assert.assertNotNull("Directory has to be set for the test to continue", dir)
-
-        def actualFiles = []
-        if (dir.exists()) {
-          dir.eachFile FileType.FILES, { file ->
-            def relPath = new File( dir.toURI().relativize( file.toURI() ).toString() ).path
-            actualFiles << relPath
-          }
-        }
-
-        def missingList = []
-        for (def wantFile : expectedFiles) {
-          def ok = false
-          for (def haveFile : actualFiles) {
-            if (haveFile =~ wantFile) {
-              ok = true
-              break
-            }
-          }
-          if (! ok) {
-            missingList << wantFile
-          }
-        }
-
-        def extraList = []
-        for (def haveFile : actualFiles) {
-          def ok = false
-          for (def wantFile : expectedFiles) {
-            if (haveFile =~ wantFile) {
-              ok = true
-              break
-            }
-          }
-          if (! ok) {
-            extraList << haveFile
-          }
-        }
-
-        def commonFiles = actualFiles.intersect(expectedFiles)
-        Assert.assertTrue("${testName} fail: Directory content for ${dir.path} does not match reference. Missing files: ${missingList}. Extra files: ${extraList}",
-           missingList.size() == 0 && extraList.size() == 0)
-        break
-      case 'hadoop_tools':
-        def toolsPathStr = getEnv("HADOOP_TOOLS_PATH", "hadoop envvars")
-        Assert.assertNotNull("${testName} fail: HADOOP_TOOLS_PATH environment variable should be set", toolsPathStr)
-
-        def toolsPath = new File(toolsPathStr)
-        Assert.assertTrue("${testName} fail: HADOOP_TOOLS_PATH must be an absolute path.", toolsPath.isAbsolute())
-
-        Shell sh = new Shell()
-        def classPath = sh.exec("hadoop classpath").getOut().join("\n")
-        Assert.assertTrue("${testName} fail: Failed to retrieve hadoop's classpath", sh.getRet()==0)
-
-        Assert.assertFalse("${testName} fail: The entire '${toolsPath}' path should not be included in hadoop's classpath",
-          classPath.split(File.pathSeparator).any {
-            new File(it).getCanonicalPath() =~ /^${toolsPath}\/?\*/
-          }
-        )
-        break
-      case 'api_examination':
-        def basedir = getEnv(arguments['baseDirEnv'], arguments['envcmd'])
-        def libdir = getEnv(arguments['libDir'], arguments['envcmd'])
-
-        def dir = new File(basedir + "/" + libdir)
-        Assert.assertTrue("Expected " + dir.getPath() + " to be a directory", dir.isDirectory())
-        def pattern = Pattern.compile(arguments['jar'] + "-[0-9]+.*\\.jar")
-        def String[] jars = dir.list(new FilenameFilter() {
-          @Override
-          boolean accept(File d, String name) {
-            Matcher matcher = pattern.matcher(name)
-            return (matcher.matches() && !name.contains("test"))
-          }
-        })
-        Assert.assertEquals("Expected only one jar, but got " + jars.join(", "), 1, jars.length)
-        def jar = dir.getAbsolutePath() + "/" + jars[0]
-
-        def examinerJar = System.properties['odpi.test.hive.hcat.job.jar']
-        def resourceFile = System.properties['test.resources.dir']+ "/" + arguments['resourceFile']
-        Shell sh = new Shell()
-        def results = sh.exec("hadoop jar " + examinerJar + " org.odpi.specs.runtime.hadoop.ApiExaminer -c " + resourceFile + " -j " + jar).getErr()
-        int rc = sh.getRet()
-        Assert.assertEquals("Expected command to succeed, but got return code " + rc, 0, rc)
-        if (results.size() > 0) {
-          System.out.println("Received report for jar " + arguments['jar'] + results.join("\n"))
-        }
-        break;
-
-
-      default:
-        break
-    }
-  }
-}
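
A note on the helper above: when a variable is not already in the process environment, getEnvMap() shells out to commands such as "hadoop envvars" and parses lines of the form NAME='value' with a named-group regex. Below is a minimal Java sketch of that same parsing idea, shown only to illustrate the pattern; the sample input lines are made up.

import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class EnvVarsParseSketch {
  // Same shape as the pattern in getEnvMap(): NAME='value' at the end of a line.
  private static final Pattern LINE = Pattern.compile("(?<variable>[^=]+)='(?<value>[^']+)'$");

  public static Map<String, String> parse(String[] lines) {
    Map<String, String> env = new HashMap<>();
    for (String line : lines) {
      Matcher m = LINE.matcher(line);
      if (m.matches()) {
        env.put(m.group("variable"), m.group("value"));
      }
    }
    return env;
  }

  public static void main(String[] args) {
    // Illustrative input only; real values come from running "hadoop envvars" and similar commands.
    String[] sample = {
        "JAVA_HOME='/usr/lib/jvm/java-8'",
        "HADOOP_TOOLS_PATH='/usr/lib/hadoop/share/hadoop/tools/lib'"
    };
    System.out.println(parse(sample));
  }
}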

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
deleted file mode 100644
index 3e56224..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.odpi.specs.runtime.hive;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.commons.exec.CommandLine;
-import org.apache.commons.exec.DefaultExecuteResultHandler;
-import org.apache.commons.exec.DefaultExecutor;
-import org.apache.commons.exec.ExecuteException;
-import org.apache.commons.exec.ExecuteWatchdog;
-import org.apache.commons.exec.Executor;
-import org.apache.commons.exec.PumpStreamHandler;
-import org.apache.commons.exec.environment.EnvironmentUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-public class HiveHelper {
-	
-	private static final Log LOG = LogFactory.getLog(HiveHelper.class.getName());
-
-	public static Map<String, String> execCommand(CommandLine commandline) {
-		return execCommand(commandline, null);
-	}
-
-	public static Map<String, String> execCommand(CommandLine commandline,
-																								Map<String, String> envVars) {
-		
-		System.out.println("Executing command:");
-		System.out.println(commandline.toString());
-		Map<String, String> env = null;
-		Map<String, String> entry = new HashMap<String, String>();
-		try {
-			env = EnvironmentUtils.getProcEnvironment();
-		} catch (IOException e1) {
-			// TODO Auto-generated catch block
-			LOG.debug("Failed to get process environment: "+ e1.getMessage());
-			e1.printStackTrace();
-		}
-		if (envVars != null) {
-			for (String key : envVars.keySet()) {
-				env.put(key, envVars.get(key));
-			}
-		}
-
-		DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
-		ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
-		PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream);
-		ExecuteWatchdog watchdog = new ExecuteWatchdog(60*10000);
-		Executor executor = new DefaultExecutor();
-		executor.setExitValue(1);
-		executor.setWatchdog(watchdog);
-		executor.setStreamHandler(streamHandler);
-		try {
-			executor.execute(commandline, env, resultHandler);
-		} catch (ExecuteException e) {
-			// TODO Auto-generated catch block
-			LOG.debug("Failed to execute command with exit value: "+ String.valueOf(resultHandler.getExitValue()));
-			LOG.debug("outputStream: "+ outputStream.toString());
-			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
-			entry.put("outputStream", outputStream.toString() + e.getMessage());
-			e.printStackTrace();
-			return entry;
-		} catch (IOException e) {
-			// TODO Auto-generated catch block
-			LOG.debug("Failed to execute command with exit value: "+ String.valueOf(resultHandler.getExitValue()));
-			LOG.debug("outputStream: "+ outputStream.toString());
-			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
-			entry.put("outputStream", outputStream.toString() + e.getMessage());
-			e.printStackTrace();
-			return entry;
-		}
-		
-		try {
-			resultHandler.waitFor();
-			/*System.out.println("Command output: "+outputStream.toString());*/
-			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
-			entry.put("outputStream", outputStream.toString());
-			return entry;
-		} catch (InterruptedException e) {
-			// TODO Auto-generated catch block
-			/*System.out.println("Command output: "+outputStream.toString());*/
-			LOG.debug("exitValue: "+ String.valueOf(resultHandler.getExitValue()));
-			LOG.debug("outputStream: "+ outputStream.toString());
-			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
-			entry.put("outputStream", outputStream.toString());
-			e.printStackTrace();		
-			return entry;
-		}
-	}
-	
-	protected static String getProperty(String property, String description) {
-		String val = System.getProperty(property);
-		if (val == null) {
-			throw new RuntimeException("You must set the property " + property + " with " +
-				description);
-		}
-		LOG.debug(description + " is " + val);
-		return val;
-	 }
-	
-
-}
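
For context, the removed helper is how the CLI and Beeline tests (also deleted in this commit, below) shelled out to hive and beeline. A hedged sketch of a typical call site, assuming HiveHelper and commons-exec are on the classpath; the class name and query are illustrative.

package org.odpi.specs.runtime.hive;

import java.util.Map;

import org.apache.commons.exec.CommandLine;

public class HiveHelperUsageSketch {
  public static void main(String[] args) {
    // Build "hive -e 'SHOW DATABASES'" the same way the deleted tests did.
    CommandLine cmd = new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES");
    Map<String, String> results = HiveHelper.execCommand(cmd);
    // execCommand() returns the exit value and the captured output as strings.
    int exitValue = Integer.parseInt(results.get("exitValue"));
    String output = results.get("outputStream");
    System.out.println("exit=" + exitValue);
    System.out.println(output);
  }
}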

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
deleted file mode 100644
index 7512dab..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.odpi.specs.runtime.hive;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-import java.util.Properties;
-
-public class JdbcConnector {
-  private static final Log LOG = LogFactory.getLog(JdbcConnector.class.getName());
-
-  protected static final String URL = "odpi.test.hive.jdbc.url";
-  protected static final String USER = "odpi.test.hive.jdbc.user";
-  protected static final String PASSWD = "odpi.test.hive.jdbc.password";
-  protected static final String LOCATION = "odpi.test.hive.location";
-  protected static final String METASTORE_URL = "odpi.test.hive.metastore.url";
-  protected static final String TEST_THRIFT = "odpi.test.hive.thrift.test";
-  protected static final String TEST_HCATALOG = "odpi.test.hive.hcatalog.test";
-  protected static final String HIVE_CONF_DIR = "odpi.test.hive.conf.dir";
-  protected static final String HADOOP_CONF_DIR = "odpi.test.hadoop.conf.dir";
-
-  protected static Connection conn;
-
-  @BeforeClass
-  public static void connectToJdbc() throws SQLException {
-    // Assume they've put the URL for the JDBC driver in an environment variable.
-    String jdbcUrl = getProperty(URL, "the JDBC URL");
-    String jdbcUser = getProperty(USER, "the JDBC user name");
-    String jdbcPasswd = getProperty(PASSWD, "the JDBC password");
-
-    Properties props = new Properties();
-    props.put("user", jdbcUser);
-    if (!jdbcPasswd.equals("")) props.put("password", jdbcPasswd);
-    conn = DriverManager.getConnection(jdbcUrl, props);
-  }
-
-  @AfterClass
-  public static void closeJdbc() throws SQLException {
-    if (conn != null) conn.close();
-  }
-
-  protected static String getProperty(String property, String description) {
-    String val = System.getProperty(property);
-    if (val == null) {
-      throw new RuntimeException("You must set the property " + property + " with " +
-          description);
-    }
-    LOG.debug(description + " is " + val);
-    return val;
-  }
-
-  protected static boolean testActive(String property, String description) {
-    String val = System.getProperty(property, "true");
-    LOG.debug(description + " is " + val);
-    return Boolean.valueOf(val);
-  }
-
-}
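
A hedged sketch of how a test built on this base class used the shared connection: the subclass name and query are illustrative assumptions, but Statement/ResultSet are standard JDBC, and conn is the field opened in connectToJdbc() above.

package org.odpi.specs.runtime.hive;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import org.junit.Assert;
import org.junit.Test;

public class JdbcSmokeSketch extends JdbcConnector {
  @Test
  public void showDatabases() throws SQLException {
    // conn is opened once per test class by JdbcConnector.connectToJdbc().
    try (Statement stmt = conn.createStatement();
         ResultSet rs = stmt.executeQuery("SHOW DATABASES")) {
      Assert.assertTrue("Expected at least the default database", rs.next());
    }
  }
}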

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
deleted file mode 100644
index 578621a..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
+++ /dev/null
@@ -1,201 +0,0 @@
-package org.odpi.specs.runtime.hive;
-import org.apache.commons.exec.CommandLine;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import java.io.FileNotFoundException;
-import java.io.PrintWriter;
-import java.util.Map;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-public class TestBeeline {
-	
-	public static final Log LOG = LogFactory.getLog(TestBeeline.class.getName());
-	
-	private static final String URL = "odpi.test.hive.jdbc.url";
-	private static final String USER = "odpi.test.hive.jdbc.user";
-	private static final String PASSWD = "odpi.test.hive.jdbc.password";
-	
-	private static Map<String, String> results;
-	private static String beelineUrl; 
-	private static String beelineUser;
-	private static String beelinePasswd;
-	
-	//creating beeline base command with username and password as per inputs
-	private static CommandLine beelineBaseCommand = new CommandLine("beeline");
-
-	@BeforeClass
-	public static void initialSetup(){
-		TestBeeline.beelineUrl = System.getProperty(URL);
-		TestBeeline.beelineUser = System.getProperty(USER);
-		TestBeeline.beelinePasswd =System.getProperty(PASSWD);
-
-		if (beelineUser != null && !beelineUser.isEmpty() && beelinePasswd != null && !beelinePasswd.isEmpty())
-		{ 
-			beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd);
-		}
-		else if (beelineUser != null && !beelineUser.isEmpty())
-		{ 
-			beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser);
-		}
-		else {
-			beelineBaseCommand.addArgument("-u").addArgument(beelineUrl);
-		}
-		LOG.info("URL is " + beelineUrl); 
-		LOG.info("User is " + beelineUser);
-		LOG.info("Passwd is " + beelinePasswd); 
-		LOG.info("Passwd is null " + (beelinePasswd == null));
-	}
-
-	@Test
-	public void checkBeeline() {
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline -u FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-	}
-	
-	@Test
-	public void checkBeelineConnect(){
-		try(PrintWriter out = new PrintWriter("connect.url")){ out.println("!connect " + beelineUrl+" "+beelineUser+" "+beelinePasswd); out.println("!quit"); } 
-		catch (FileNotFoundException e1) {
-			e1.printStackTrace();
-		}
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -f connect.url",false));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline !connect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception") );  
-	}
-	
-	@Test
-	public void checkBeelineHelp(){
-		results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("--help"));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline --help FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("display this message" ) && consoleMsg.contains("usage: java org.apache.hive.cli.beeline.beeline") && !consoleMsg.contains("exception"));
-	}
-	
-	@Test
-	public void checkBeelineQueryExecFromCmdLine(){
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive;"));
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
-		}
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline -e FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
-	}
-	
-	@Test
-	public void checkBeelineQueryExecFromFile() throws FileNotFoundException{
-		
-		try(PrintWriter out = new PrintWriter("beeline-f1.sql")){ out.println("SHOW DATABASES;"); }
-		try(PrintWriter out = new PrintWriter("beeline-f2.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
-		try(PrintWriter out = new PrintWriter("beeline-f3.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
-		try(PrintWriter out = new PrintWriter("beeline-f4.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); }
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false));
-		
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f2.sql",false));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f3.sql",false));
-		}
-		
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false));
-
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline -f FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f4.sql",false));	
-	}
-	
-	@Test
-	public void checkBeelineInitFile() throws FileNotFoundException{
-
-		try(PrintWriter out = new PrintWriter("beeline-i1.sql")){ out.println("SHOW DATABASES;"); }
-		try(PrintWriter out = new PrintWriter("beeline-i2.sql")){ out.println("CREATE DATABASE odpi_runtime_beeline_init;"); }
-		try(PrintWriter out = new PrintWriter("beeline-i3.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); out.println("CREATE DATABASE odpi_runtime_beeline_init;"); }
-		try(PrintWriter out = new PrintWriter("beeline-i4.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); }
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false));
-	
-		if(!results.get("outputStream").contains("odpi_runtime_beeline_init")){
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i2.sql",false));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i3.sql",false));
-		}
-		
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline -i FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_beeline_init") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i4.sql",false));	
-	}
-	
-	@Test
-	public void checkBeelineHiveVar() throws FileNotFoundException{
-
-		try(PrintWriter out = new PrintWriter("beeline-hv1.sql")){ out.println("SHOW DATABASES;"); }
-		try(PrintWriter out = new PrintWriter("beeline-hv2.sql")){ out.println("CREATE DATABASE ${db};"); }
-		try(PrintWriter out = new PrintWriter("beeline-hv3.sql")){ out.println("DROP DATABASE ${db};"); out.println("CREATE DATABASE ${db};"); }
-		try(PrintWriter out = new PrintWriter("beeline-hv4.sql")){ out.println("DROP DATABASE ${db};"); }
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false));
-	
-		if(!results.get("outputStream").contains("odpi_runtime_beeline_hivevar")){
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv2.sql",false));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv3.sql",false));
-		}
-		
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline --hivevar FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_beeline_hivevar") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv4.sql",false));		 
-	}
-	
-	@Test
-	public void checkBeelineFastConnect(){
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--fastConnect=false"));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline --fastConnect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("set fastconnect to true to skip"));
-	}
-
-	@Test
-	public void checkBeelineVerbose(){
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--verbose=true"));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline --verbose FAILED." +results.get("outputStream"), true, consoleMsg.contains("issuing: !connect jdbc:hive2:") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-	}
-	
-	@Test
-	public void checkBeelineShowHeader(){
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--showHeader=false").addArgument("-e").addArgument("SHOW DATABASES;"));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline --showHeader FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("default")&&!consoleMsg.contains("database_name") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-	}
-
-	@AfterClass
-	public static void cleanup() throws FileNotFoundException {
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf beeline*.sql", false));
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf connect.url", false));
-	}
-}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
deleted file mode 100644
index 2b70909..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
+++ /dev/null
@@ -1,213 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.odpi.specs.runtime.hive;
-
-import java.io.FileNotFoundException;
-import java.io.PrintWriter;
-import java.util.Map;
-
-import org.apache.commons.exec.CommandLine;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.AfterClass;
-import org.junit.Assert;
-
-public class TestCLI {
-	
-	static Map<String, String> results;
-	static String db = "javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=odpi_metastore_db;create=true";
-	
-	@BeforeClass
-	public static void setup(){
-		
-		results = HiveHelper.execCommand(new CommandLine("which").addArgument("hive"));
-		Assert.assertEquals("Hive is not in the current path.", 0, Integer.parseInt(results.get("exitValue")));
-	}
-	
-	@Test
-	public void help(){		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-H"));
-		//LOG.info(results.get("exitValue"));
-		Assert.assertEquals("Error in executing 'hive -H'", 2, Integer.parseInt(results.get("exitValue")));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--help"));
-		Assert.assertEquals("Error in executing 'hive --help'", 0, Integer.parseInt(results.get("exitValue")));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-U"));
-		Assert.assertEquals("Unrecognized option should exit 1.", 1, Integer.parseInt(results.get("exitValue")));
-	}
-	 
-	@Test
-	public void sqlFromCmdLine(){
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
-		}
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-	}
-	
-	@Test
-	public void sqlFromFiles() throws FileNotFoundException{
-		try(PrintWriter out = new PrintWriter("hive-f1.sql")){ out.println("SHOW DATABASES;"); }
-		try(PrintWriter out = new PrintWriter("hive-f2.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
-		try(PrintWriter out = new PrintWriter("hive-f3.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
-		try(PrintWriter out = new PrintWriter("hive-f4.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); }
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f1.sql").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f2.sql").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f3.sql").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
-		}
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f4.sql").addArgument("--hiveconf").addArgument(db));
-	}
-	
-	@Test
-	public void silent() {
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-S").addArgument("--hiveconf").addArgument(db));
-		Assert.assertFalse("-S option did not work.", results.get("outputStream").contains("Time taken:"));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--silent").addArgument("--hiveconf").addArgument(db));
-		Assert.assertFalse("--silent option did not work.", results.get("outputStream").contains("Time taken:"));
-	}
-	
-	@Test
-	public void verbose(){
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-v").addArgument("--hiveconf").addArgument(db));
-		Assert.assertTrue("-v option did not work.", results.get("outputStream").contains("SHOW DATABASES"));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--verbose").addArgument("--hiveconf").addArgument(db));
-		Assert.assertTrue("--verbose option did not work.", results.get("outputStream").contains("SHOW DATABASES"));
-	}
-	
-	@Test
-	public void initialization() throws FileNotFoundException{
-		try(PrintWriter out = new PrintWriter("hive-init1.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
-		try(PrintWriter out = new PrintWriter("hive-init2.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init1.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", 0, Integer.parseInt(results.get("exitValue")));
-			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", true, results.get("outputStream").contains("odpi_runtime_hive"));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init2.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
-			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", true, results.get("outputStream").contains("odpi_runtime_hive"));
-		}
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-	}
-	
-	@Test
-	public void database(){
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		}
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive_1234").addArgument("-e").addArgument("CREATE TABLE odpi ( MYID INT );").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("Non-existent database returned with wrong exit code: "+Integer.parseInt(results.get("exitValue")), 88, Integer.parseInt(results.get("exitValue")));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("CREATE TABLE odpi ( MYID INT );").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("DESCRIBE odpi").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("Failed to get expected column after creating odpi table using --database argument.", true, results.get("outputStream").contains("myid"));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("DROP TABLE odpi").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("Failed to drop table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-	}
-	
-	@Test
-	public void hiveConf(){
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("The --hiveconf option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		Assert.assertEquals("The -hiveconf variant option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
-	}
-	
-	@Test
-	public void variableSubstitution() throws FileNotFoundException{
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		}
-		try(PrintWriter out = new PrintWriter("hive-define.sql")){ out.println("show ${A};"); out.println("quit;"); }
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive -d A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false));		
-		Assert.assertEquals("The hive -d A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
-		Assert.assertEquals("The hive -d A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
-		
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --define A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false));		
-		Assert.assertEquals("The hive --define A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
-		Assert.assertEquals("The hive --define A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-	}
-	
-	@Test
-	public void hiveVar() throws FileNotFoundException{
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
-		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		}
-		try(PrintWriter out = new PrintWriter("hive-var.sql")){ out.println("show ${A};"); out.println("quit;"); }
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hivevar A=DATABASES --hiveconf '"+db+"' < hive-var.sql", false));		
-		Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
-		Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
-		
-		try(PrintWriter out = new PrintWriter("hiveconf-var.sql")){ out.println("show ${hiveconf:A};"); out.println("quit;"); }
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hiveconf A=DATABASES --hiveconf '"+db+"' < hiveconf-var.sql", false));		
-		Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
-		Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
-		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-	}
-	
-	@AfterClass
-	public static void cleanup(){
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-f*.sql", false));
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-init*.sql", false));
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-define.sql", false));
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-var.sql", false));
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hiveconf-var.sql", false));
-	}
-	 
-}
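
The deleted TestCLI above drives the hive command-line client through Apache commons-exec, pointing
every invocation at an embedded Derby metastore via --hiveconf. As a rough sketch of that pattern
(this is not the project's HiveHelper, whose implementation is not part of this hunk; it only
assumes that `hive` is on the PATH):

    import org.apache.commons.exec.CommandLine;
    import org.apache.commons.exec.DefaultExecutor;
    import org.apache.commons.exec.PumpStreamHandler;

    import java.io.ByteArrayOutputStream;

    public class HiveCliSketch {
      public static void main(String[] args) throws Exception {
        String db = "javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=odpi_metastore_db;create=true";
        CommandLine cmd = new CommandLine("hive")
            .addArgument("-e")
            .addArgument("SHOW DATABASES")
            .addArgument("--hiveconf")
            .addArgument(db);

        ByteArrayOutputStream output = new ByteArrayOutputStream();
        DefaultExecutor executor = new DefaultExecutor();
        executor.setStreamHandler(new PumpStreamHandler(output)); // capture stdout and stderr
        executor.setExitValues(null);                             // do not throw on non-zero exit codes
        int exitValue = executor.execute(cmd);

        System.out.println("exit=" + exitValue);
        System.out.println(output.toString());
      }
    }

HiveHelper presumably wraps something similar and hands back the exit value and captured output as
the map that the assertions above read.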

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
deleted file mode 100644
index 0ea49ce..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
+++ /dev/null
@@ -1,158 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.odpi.specs.runtime.hive;
-
-import org.apache.commons.exec.CommandLine;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.IMetaStoreClient;
-import org.apache.hadoop.hive.metastore.TableType;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.SerDeInfo;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
-import org.apache.hive.hcatalog.data.schema.HCatSchema;
-import org.apache.thrift.TException;
-import org.junit.Assert;
-import org.junit.Assume;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Random;
-
-
-public class TestHCatalog {
-  private static final String JOBJAR = "odpi.test.hive.hcat.job.jar";
-  private static final String HCATCORE = "odpi.test.hive.hcat.core.jar";
-
-  private static final Log LOG = LogFactory.getLog(TestHCatalog.class.getName());
-
-  private static IMetaStoreClient client = null;
-  private static HiveConf conf;
-  private static HCatSchema inputSchema;
-  private static HCatSchema outputSchema;
-
-  private Random rand;
-
-  @BeforeClass
-  public static void connect() throws MetaException {
-    if (JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog ")) {
-      String hiveConfDir = JdbcConnector.getProperty(JdbcConnector.HIVE_CONF_DIR,
-          "Hive conf directory ");
-      String hadoopConfDir = JdbcConnector.getProperty(JdbcConnector.HADOOP_CONF_DIR,
-          "Hadoop conf directory ");
-      conf = new HiveConf();
-      String fileSep = System.getProperty("file.separator");
-      conf.addResource(new Path(hadoopConfDir + fileSep + "core-site.xml"));
-      conf.addResource(new Path(hadoopConfDir + fileSep + "hdfs-site.xml"));
-      conf.addResource(new Path(hadoopConfDir + fileSep + "yarn-site.xml"));
-      conf.addResource(new Path(hadoopConfDir + fileSep + "mapred-site.xml"));
-      conf.addResource(new Path(hiveConfDir + fileSep + "hive-site.xml"));
-      client = new HiveMetaStoreClient(conf);
-
-    }
-  }
-
-  @Before
-  public void checkIfActive() {
-    Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog "));
-    rand = new Random();
-  }
-
-  @Test
-  public void hcatInputFormatOutputFormat() throws TException, IOException, ClassNotFoundException,
-      InterruptedException, URISyntaxException {
-    // Create a table to write to
-    final String inputTable = "odpi_hcat_input_table_" + rand.nextInt(Integer.MAX_VALUE);
-    SerDeInfo serde = new SerDeInfo("default_serde",
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
-    FieldSchema schema = new FieldSchema("line", "string", "");
-    inputSchema = new HCatSchema(Collections.singletonList(new HCatFieldSchema(schema.getName(),
-        HCatFieldSchema.Type.STRING, schema.getComment())));
-    StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(schema), null,
-        "org.apache.hadoop.mapred.TextInputFormat",
-        "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null,
-        new HashMap<String, String>());
-    Table table = new Table(inputTable, "default", "me", 0, 0, 0, sd, null,
-        new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
-    client.createTable(table);
-
-    final String outputTable = "odpi_hcat_output_table_" + rand.nextInt(Integer.MAX_VALUE);
-    sd = new StorageDescriptor(Arrays.asList(
-          new FieldSchema("word", "string", ""),
-          new FieldSchema("count", "int", "")),
-        null, "org.apache.hadoop.mapred.TextInputFormat",
-        "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null,
-        new HashMap<String, String>());
-    table = new Table(outputTable, "default", "me", 0, 0, 0, sd, null,
-        new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
-    client.createTable(table);
-    outputSchema = new HCatSchema(Arrays.asList(
-        new HCatFieldSchema("word", HCatFieldSchema.Type.STRING, ""),
-        new HCatFieldSchema("count", HCatFieldSchema.Type.INT, "")));
-
-    // LATER Could I use HCatWriter here and the reader to read it?
-    // Write some stuff into a file in the location of the table
-    table = client.getTable("default", inputTable);
-    String inputFile = table.getSd().getLocation() + "/input";
-    Path inputPath = new Path(inputFile);
-    FileSystem fs = FileSystem.get(conf);
-    FSDataOutputStream out = fs.create(inputPath);
-    out.writeChars("Mary had a little lamb\n");
-    out.writeChars("its fleece was white as snow\n");
-    out.writeChars("and everywhere that Mary went\n");
-    out.writeChars("the lamb was sure to go\n");
-    out.close();
-
-    Map<String, String> env = new HashMap<>();
-    env.put("HADOOP_CLASSPATH", System.getProperty(HCATCORE, ""));
-    Map<String, String> results = HiveHelper.execCommand(new CommandLine("hive")
-        .addArgument("--service")
-        .addArgument("jar")
-        .addArgument(System.getProperty(JOBJAR))
-        .addArgument(HCatalogMR.class.getName())
-        .addArgument("-it")
-        .addArgument(inputTable)
-        .addArgument("-ot")
-        .addArgument(outputTable)
-        .addArgument("-is")
-        .addArgument(inputSchema.getSchemaAsTypeString())
-        .addArgument("-os")
-        .addArgument(outputSchema.getSchemaAsTypeString()), env);
-    LOG.info(results.toString());
-    Assert.assertEquals("HCat job failed", 0, Integer.parseInt(results.get("exitValue")));
-
-    client.dropTable("default", inputTable);
-    client.dropTable("default", outputTable);
-  }
-
-}
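
TestHCatalog above creates the input and output tables through the metastore client and then
submits a separate HCatalogMR job via `hive --service jar`. The job class itself is not part of
this hunk; the sketch below only illustrates how a MapReduce driver is commonly wired to HCatalog
tables with the standard HCatInputFormat/HCatOutputFormat API (the class and method names here,
and the use of the "default" database, are illustrative assumptions, not the HCatalogMR source):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hive.hcatalog.data.schema.HCatSchema;
    import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
    import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
    import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;

    public class HCatJobSketch {
      public static void run(Configuration conf, String inputTable, String outputTable,
                             HCatSchema outputSchema) throws Exception {
        Job job = Job.getInstance(conf, "hcat-sketch");
        job.setJarByClass(HCatJobSketch.class);

        // Read rows of the input table (database "default", as in the test above).
        HCatInputFormat.setInput(job, "default", inputTable);
        job.setInputFormatClass(HCatInputFormat.class);

        // Write HCatRecords into the output table with the declared schema.
        job.setOutputFormatClass(HCatOutputFormat.class);
        HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", outputTable, null));
        HCatOutputFormat.setSchema(job, outputSchema);

        // A real job would also set mapper/reducer classes and key/value types here.
        job.waitForCompletion(true);
      }
    }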

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
deleted file mode 100644
index 154fd9c..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
+++ /dev/null
@@ -1,545 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.odpi.specs.runtime.hive;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.junit.Test;
-
-import java.sql.Connection;
-import java.sql.DatabaseMetaData;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.ResultSetMetaData;
-import java.sql.SQLException;
-import java.sql.SQLWarning;
-import java.sql.Statement;
-import java.sql.Types;
-
-public class TestJdbc extends JdbcConnector {
-  private static final Log LOG = LogFactory.getLog(TestJdbc.class.getName());
-
-  /**
-   * Test simple Connection calls that are not statement related.  setSchema is tested elsewhere
-   * because it needs extra setup, and similarly with getMetaData.
-   * @throws SQLException
-   */
-  @Test
-  public void nonStatementCalls() throws SQLException {
-    conn.clearWarnings();
-
-    boolean isAutoCommit = conn.getAutoCommit();
-    LOG.debug("Auto commit is " + isAutoCommit);
-
-    String catalog = conn.getCatalog();
-    LOG.debug("Catalog is " + catalog);
-
-    String schema = conn.getSchema();
-    LOG.debug("Schema is " + schema);
-
-    int txnIsolation = conn.getTransactionIsolation();
-    LOG.debug("Transaction Isolation is " + txnIsolation);
-
-    SQLWarning warning = conn.getWarnings();
-    while (warning != null) {
-      LOG.debug("Found a warning: " + warning.getMessage());
-      warning = warning.getNextWarning();
-    }
-
-    boolean closed = conn.isClosed();
-    LOG.debug("Is closed? " + closed);
-
-    boolean readOnly = conn.isReadOnly();
-    LOG.debug("Is read only?" + readOnly);
-
-    // Hive doesn't support catalogs, so setting this to any value should be fine.  If non-Hive
-    // systems run this test, setting it to an invalid catalog name may cause issues, so this
-    // value may need to be made configurable.
-    conn.setCatalog("fred");
-  }
-
-  /**
-   * Test simple DatabaseMetaData calls.  getColumns is tested elsewhere, as we need to call
-   * that on a valid table.  Same with getFunctions.
-   * @throws SQLException
-   */
-  @Test
-  public void databaseMetaDataCalls() throws SQLException {
-    DatabaseMetaData md = conn.getMetaData();
-
-    boolean boolrc = md.allTablesAreSelectable();
-    LOG.debug("All tables are selectable? " + boolrc);
-
-    String strrc = md.getCatalogSeparator();
-    LOG.debug("Catalog separator " + strrc);
-
-    strrc = md.getCatalogTerm();
-    LOG.debug("Catalog term " + strrc);
-
-    ResultSet rs = md.getCatalogs();
-    while (rs.next()) {
-      strrc = rs.getString(1);
-      LOG.debug("Found catalog " + strrc);
-    }
-
-    Connection c = md.getConnection();
-
-    int intrc = md.getDatabaseMajorVersion();
-    LOG.debug("DB major version is " + intrc);
-
-    intrc = md.getDatabaseMinorVersion();
-    LOG.debug("DB minor version is " + intrc);
-
-    strrc = md.getDatabaseProductName();
-    LOG.debug("DB product name is " + strrc);
-
-    strrc = md.getDatabaseProductVersion();
-    LOG.debug("DB product version is " + strrc);
-
-    intrc = md.getDefaultTransactionIsolation();
-    LOG.debug("Default transaction isolation is " + intrc);
-
-    intrc = md.getDriverMajorVersion();
-    LOG.debug("Driver major version is " + intrc);
-
-    intrc = md.getDriverMinorVersion();
-    LOG.debug("Driver minor version is " + intrc);
-
-    strrc = md.getDriverName();
-    LOG.debug("Driver name is " + strrc);
-
-    strrc = md.getDriverVersion();
-    LOG.debug("Driver version is " + strrc);
-
-    strrc = md.getExtraNameCharacters();
-    LOG.debug("Extra name characters is " + strrc);
-
-    strrc = md.getIdentifierQuoteString();
-    LOG.debug("Identifier quote string is " + strrc);
-
-    // In Hive 1.2 this always returns an empty RS
-    rs = md.getImportedKeys("a", "b", "d");
-
-    // In Hive 1.2 this always returns an empty RS
-    rs = md.getIndexInfo("a", "b", "d", true, true);
-
-    intrc = md.getJDBCMajorVersion();
-    LOG.debug("JDBC major version is " + intrc);
-
-    intrc = md.getJDBCMinorVersion();
-    LOG.debug("JDBC minor version is " + intrc);
-
-    intrc = md.getMaxColumnNameLength();
-    LOG.debug("Maximum column name length is " + intrc);
-
-    strrc = md.getNumericFunctions();
-    LOG.debug("Numeric functions are " + strrc);
-
-    // In Hive 1.2 this always returns an empty RS
-    rs = md.getPrimaryKeys("a", "b", "d");
-
-    // In Hive 1.2 this always returns an empty RS
-    rs = md.getProcedureColumns("a", "b", "d", "e");
-
-    strrc = md.getProcedureTerm();
-    LOG.debug("Procedures are called " + strrc);
-
-    // In Hive 1.2 this always returns an empty RS
-    rs = md.getProcedures("a", "b", "d");
-
-    strrc = md.getSchemaTerm();
-    LOG.debug("Schemas are called " + strrc);
-
-    rs = md.getSchemas();
-    while (rs.next()) {
-      strrc = rs.getString(1);
-      LOG.debug("Found schema " + strrc);
-    }
-
-    strrc = md.getSearchStringEscape();
-    LOG.debug("Search string escape is " + strrc);
-
-    strrc = md.getStringFunctions();
-    LOG.debug("String functions are " + strrc);
-
-    strrc = md.getSystemFunctions();
-    LOG.debug("System functions are " + strrc);
-
-    rs = md.getTableTypes();
-    while (rs.next()) {
-      strrc = rs.getString(1);
-      LOG.debug("Found table type " + strrc);
-    }
-
-    strrc = md.getTimeDateFunctions();
-    LOG.debug("Time/date functions are " + strrc);
-
-    rs = md.getTypeInfo();
-    while (rs.next()) {
-      strrc = rs.getString(1);
-      LOG.debug("Found type " + strrc);
-    }
-
-    // In Hive 1.2 this always returns an empty RS
-    rs = md.getUDTs("a", "b", "d", null);
-
-    boolrc = md.supportsAlterTableWithAddColumn();
-    LOG.debug("Supports alter table with add column? " + boolrc);
-
-    boolrc = md.supportsAlterTableWithDropColumn();
-    LOG.debug("Supports alter table with drop column? " + boolrc);
-
-    boolrc = md.supportsBatchUpdates();
-    LOG.debug("Supports batch updates? " + boolrc);
-
-    boolrc = md.supportsCatalogsInDataManipulation();
-    LOG.debug("Supports catalogs in data manipulation? " + boolrc);
-
-    boolrc = md.supportsCatalogsInIndexDefinitions();
-    LOG.debug("Supports catalogs in index definition? " + boolrc);
-
-    boolrc = md.supportsCatalogsInPrivilegeDefinitions();
-    LOG.debug("Supports catalogs in privilege definition? " + boolrc);
-
-    boolrc = md.supportsCatalogsInProcedureCalls();
-    LOG.debug("Supports catalogs in procedure calls? " + boolrc);
-
-    boolrc = md.supportsCatalogsInTableDefinitions();
-    LOG.debug("Supports catalogs in table definition? " + boolrc);
-
-    boolrc = md.supportsColumnAliasing();
-    LOG.debug("Supports column aliasing? " + boolrc);
-
-    boolrc = md.supportsFullOuterJoins();
-    LOG.debug("Supports full outer joins? " + boolrc);
-
-    boolrc = md.supportsGroupBy();
-    LOG.debug("Supports group by? " + boolrc);
-
-    boolrc = md.supportsLimitedOuterJoins();
-    LOG.debug("Supports limited outer joins? " + boolrc);
-
-    boolrc = md.supportsMultipleResultSets();
-    LOG.debug("Supports limited outer joins? " + boolrc);
-
-    boolrc = md.supportsNonNullableColumns();
-    LOG.debug("Supports non-nullable columns? " + boolrc);
-
-    boolrc = md.supportsOuterJoins();
-    LOG.debug("Supports outer joins? " + boolrc);
-
-    boolrc = md.supportsPositionedDelete();
-    LOG.debug("Supports positioned delete? " + boolrc);
-
-    boolrc = md.supportsPositionedUpdate();
-    LOG.debug("Supports positioned update? " + boolrc);
-
-    boolrc = md.supportsResultSetHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
-    LOG.debug("Supports result set holdability? " + boolrc);
-
-    boolrc = md.supportsResultSetType(ResultSet.TYPE_FORWARD_ONLY);
-    LOG.debug("Supports result set type? " + boolrc);
-
-    boolrc = md.supportsSavepoints();
-    LOG.debug("Supports savepoints? " + boolrc);
-
-    boolrc = md.supportsSchemasInDataManipulation();
-    LOG.debug("Supports schemas in data manipulation? " + boolrc);
-
-    boolrc = md.supportsSchemasInIndexDefinitions();
-    LOG.debug("Supports schemas in index definitions? " + boolrc);
-
-    boolrc = md.supportsSchemasInPrivilegeDefinitions();
-    LOG.debug("Supports schemas in privilege definitions? " + boolrc);
-
-    boolrc = md.supportsSchemasInProcedureCalls();
-    LOG.debug("Supports schemas in procedure calls? " + boolrc);
-
-    boolrc = md.supportsSchemasInTableDefinitions();
-    LOG.debug("Supports schemas in table definitions? " + boolrc);
-
-    boolrc = md.supportsSelectForUpdate();
-    LOG.debug("Supports select for update? " + boolrc);
-
-    boolrc = md.supportsStoredProcedures();
-    LOG.debug("Supports stored procedures? " + boolrc);
-
-    boolrc = md.supportsTransactions();
-    LOG.debug("Supports transactions? " + boolrc);
-
-    boolrc = md.supportsUnion();
-    LOG.debug("Supports union? " + boolrc);
-
-    boolrc = md.supportsUnionAll();
-    LOG.debug("Supports union all? " + boolrc);
-
-  }
-
-  @Test
-  public void setSchema() throws SQLException {
-    try (Statement stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
-        ResultSet.CONCUR_READ_ONLY)) {
-
-      final String dbName = "odpi_jdbc_test_db";
-
-      final String tableName = "odpi_jdbc_test_table";
-      stmt.execute("drop table if exists " + tableName);
-
-      stmt.execute("drop database if exists " + dbName + " cascade");
-      stmt.execute("create database " + dbName);
-
-      conn.setSchema(dbName);
-
-      DatabaseMetaData md = conn.getMetaData();
-
-      ResultSet rs = md.getSchemas(null, dbName);
-
-      while (rs.next()) {
-        String schemaName = rs.getString(2);
-        LOG.debug("Schema name is " + schemaName);
-      }
-
-      stmt.execute("create table " + tableName + " (i int, s varchar(32))");
-
-      rs = md.getTables(null, dbName, tableName, null);
-      while (rs.next()) {
-        String tName = rs.getString(3);
-        LOG.debug("Schema name is " + tName);
-      }
-
-      rs = md.getColumns(null, dbName, tableName, "i");
-      while (rs.next()) {
-        String colName = rs.getString(4);
-        LOG.debug("Schema name is " + colName);
-      }
-
-      rs = md.getFunctions(null, dbName, "foo");
-      while (rs.next()) {
-        String funcName = rs.getString(3);
-        LOG.debug("Schema name is " + funcName);
-      }
-    }
-  }
-
-  @Test
-  public void statement() throws SQLException {
-    try (Statement stmt = conn.createStatement()) {
-      stmt.cancel();
-    }
-
-    try (Statement stmt = conn.createStatement()) {
-      stmt.clearWarnings();
-
-      final String tableName = "odpi_jdbc_statement_test_table";
-
-      stmt.execute("drop table if exists " + tableName);
-      stmt.execute("create table " + tableName + " (a int, b varchar(32))");
-
-      stmt.executeUpdate("insert into " + tableName + " values (1, 'abc'), (2, 'def')");
-
-      int intrc = stmt.getUpdateCount();
-      LOG.debug("Update count is " + intrc);
-
-      ResultSet rs = stmt.executeQuery("select * from " + tableName);
-      while (rs.next()) {
-        LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2));
-      }
-
-      Connection localConn = stmt.getConnection();
-
-      intrc = stmt.getFetchDirection();
-      LOG.debug("Fetch direction is " + intrc);
-
-      intrc = stmt.getFetchSize();
-      LOG.debug("Fetch size is " + intrc);
-
-      intrc = stmt.getMaxRows();
-      LOG.debug("max rows is " + intrc);
-
-      boolean boolrc = stmt.getMoreResults();
-      LOG.debug("more results is " + boolrc);
-
-      intrc = stmt.getQueryTimeout();
-      LOG.debug("query timeout is " + intrc);
-
-      stmt.execute("select * from " + tableName);
-      rs = stmt.getResultSet();
-      while (rs.next()) {
-        LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2));
-      }
-
-      intrc = stmt.getResultSetType();
-      LOG.debug("result set type is " + intrc);
-
-      SQLWarning warning = stmt.getWarnings();
-      while (warning != null) {
-        LOG.debug("Found a warning: " + warning.getMessage());
-        warning = warning.getNextWarning();
-      }
-
-      boolrc = stmt.isClosed();
-      LOG.debug("is closed " + boolrc);
-
-      boolrc = stmt.isCloseOnCompletion();
-      LOG.debug("is close on completion " + boolrc);
-
-      boolrc = stmt.isPoolable();
-      LOG.debug("is poolable " + boolrc);
-
-      stmt.setFetchDirection(ResultSet.FETCH_FORWARD);
-      stmt.setFetchSize(500);
-      stmt.setMaxRows(500);
-    }
-  }
-
-  @Test
-  public void preparedStmtAndResultSet() throws SQLException {
-    final String tableName = "odpi_jdbc_psars_test_table";
-    try (Statement stmt = conn.createStatement()) {
-      stmt.execute("drop table if exists " + tableName);
-      stmt.execute("create table " + tableName + " (bo boolean, ti tinyint, db double, fl float, " +
-          "i int, lo bigint, sh smallint, st varchar(32))");
-    }
-
-    // NOTE Hive 1.2 theoretically supports binary, Date & Timestamp in JDBC, but I get errors when I
-    // try to put them in the query.
-    try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName +
-        " values (?, ?, ?, ?, ?, ?, ?, ?)")) {
-      ps.setBoolean(1, true);
-      ps.setByte(2, (byte)1);
-      ps.setDouble(3, 3.141592654);
-      ps.setFloat(4, 3.14f);
-      ps.setInt(5, 3);
-      ps.setLong(6, 10L);
-      ps.setShort(7, (short)20);
-      ps.setString(8, "abc");
-      ps.executeUpdate();
-    }
-
-    try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName + " (i, st) " +
-        "values(?, ?)", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) {
-      ps.setNull(1, Types.INTEGER);
-      ps.setObject(2, "mary had a little lamb");
-      ps.executeUpdate();
-      ps.setNull(1, Types.INTEGER, null);
-      ps.setString(2, "its fleece was white as snow");
-      ps.clearParameters();
-      ps.setNull(1, Types.INTEGER, null);
-      ps.setString(2, "its fleece was white as snow");
-      ps.execute();
-
-    }
-
-    try (Statement stmt = conn.createStatement()) {
-
-      ResultSet rs = stmt.executeQuery("select * from " + tableName);
-
-      ResultSetMetaData md = rs.getMetaData();
-
-      int colCnt = md.getColumnCount();
-      LOG.debug("Column count is " + colCnt);
-
-      for (int i = 1; i <= colCnt; i++) {
-        LOG.debug("Looking at column " + i);
-        String strrc = md.getColumnClassName(i);
-        LOG.debug("Column class name is " + strrc);
-
-        int intrc = md.getColumnDisplaySize(i);
-        LOG.debug("Column display size is " + intrc);
-
-        strrc = md.getColumnLabel(i);
-        LOG.debug("Column label is " + strrc);
-
-        strrc = md.getColumnName(i);
-        LOG.debug("Column name is " + strrc);
-
-        intrc = md.getColumnType(i);
-        LOG.debug("Column type is " + intrc);
-
-        strrc = md.getColumnTypeName(i);
-        LOG.debug("Column type name is " + strrc);
-
-        intrc = md.getPrecision(i);
-        LOG.debug("Precision is " + intrc);
-
-        intrc = md.getScale(i);
-        LOG.debug("Scale is " + intrc);
-
-        boolean boolrc = md.isAutoIncrement(i);
-        LOG.debug("Is auto increment? " + boolrc);
-
-        boolrc = md.isCaseSensitive(i);
-        LOG.debug("Is case sensitive? " + boolrc);
-
-        boolrc = md.isCurrency(i);
-        LOG.debug("Is currency? " + boolrc);
-
-        intrc = md.getScale(i);
-        LOG.debug("Scale is " + intrc);
-
-        intrc = md.isNullable(i);
-        LOG.debug("Is nullable? " + intrc);
-
-        boolrc = md.isReadOnly(i);
-        LOG.debug("Is read only? " + boolrc);
-
-      }
-
-      while (rs.next()) {
-        LOG.debug("bo = " + rs.getBoolean(1));
-        LOG.debug("bo = " + rs.getBoolean("bo"));
-        LOG.debug("ti = " + rs.getByte(2));
-        LOG.debug("ti = " + rs.getByte("ti"));
-        LOG.debug("db = " + rs.getDouble(3));
-        LOG.debug("db = " + rs.getDouble("db"));
-        LOG.debug("fl = " + rs.getFloat(4));
-        LOG.debug("fl = " + rs.getFloat("fl"));
-        LOG.debug("i = " + rs.getInt(5));
-        LOG.debug("i = " + rs.getInt("i"));
-        LOG.debug("lo = " + rs.getLong(6));
-        LOG.debug("lo = " + rs.getLong("lo"));
-        LOG.debug("sh = " + rs.getShort(7));
-        LOG.debug("sh = " + rs.getShort("sh"));
-        LOG.debug("st = " + rs.getString(8));
-        LOG.debug("st = " + rs.getString("st"));
-        LOG.debug("tm = " + rs.getObject(8));
-        LOG.debug("tm = " + rs.getObject("st"));
-        LOG.debug("tm was null " + rs.wasNull());
-      }
-      LOG.debug("bo is column " + rs.findColumn("bo"));
-
-      int intrc = rs.getConcurrency();
-      LOG.debug("concurrency " + intrc);
-
-      intrc = rs.getFetchDirection();
-      LOG.debug("fetch direction " + intrc);
-
-      intrc = rs.getType();
-      LOG.debug("type " + intrc);
-
-      Statement copy = rs.getStatement();
-
-      SQLWarning warning = rs.getWarnings();
-      while (warning != null) {
-        LOG.debug("Found a warning: " + warning.getMessage());
-        warning = warning.getNextWarning();
-      }
-      rs.clearWarnings();
-    }
-  }
-}
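
TestJdbc above runs everything against the Connection provided by its JdbcConnector base class
(not shown in this hunk). For orientation only, a bare-bones HiveServer2 connection with the
hive-jdbc driver looks roughly like this (the URL, user, and password are placeholders, not the
values the spec tests actually use):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class HiveJdbcSketch {
      public static void main(String[] args) throws Exception {
        // Optional with JDBC 4 driver auto-loading, but harmless.
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        try (Connection conn = DriverManager.getConnection(
                 "jdbc:hive2://localhost:10000/default", "hive", "");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SHOW DATABASES")) {
          while (rs.next()) {
            System.out.println(rs.getString(1));
          }
        }
      }
    }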


[49/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

IDEA code reformatting


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/a05d3813
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/a05d3813
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/a05d3813

Branch: refs/heads/master
Commit: a05d3813f67979f74c0494fb118f98a0264266dc
Parents: 0f51fb3
Author: Roman Shaposhnik <rv...@apache.org>
Authored: Wed Mar 22 09:49:53 2017 -0700
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:17 2017 -0700

----------------------------------------------------------------------
 .../odpi/specs/runtime/hadoop/ApiExaminer.java  | 732 ++++++++---------
 .../org/odpi/specs/runtime/hive/HCatalogMR.java | 173 ++--
 .../org/odpi/specs/runtime/hive/HiveHelper.java | 158 ++--
 .../odpi/specs/runtime/hive/JdbcConnector.java  |  78 +-
 .../odpi/specs/runtime/hive/TestBeeline.java    | 364 +++++----
 .../org/odpi/specs/runtime/hive/TestCLI.java    | 387 ++++-----
 .../odpi/specs/runtime/hive/TestHCatalog.java   | 202 ++---
 .../org/odpi/specs/runtime/hive/TestJdbc.java   | 814 ++++++++++---------
 .../org/odpi/specs/runtime/hive/TestSql.java    | 524 ++++++------
 .../org/odpi/specs/runtime/hive/TestThrift.java | 396 ++++-----
 10 files changed, 1941 insertions(+), 1887 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/a05d3813/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
index a8febdb..77db1b5 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
@@ -48,442 +48,444 @@ import java.util.regex.Pattern;
  */
 public class ApiExaminer {
 
-  private static final Log LOG = LogFactory.getLog(ApiExaminer.class.getName());
-
-  static private Set<String> unloadableClasses;
-
-  private List<String> errors;
-  private List<String> warnings;
-
-  static {
-    unloadableClasses = new HashSet<>();
-    unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsMapping");
-    unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping");
-    unloadableClasses.add("org.apache.hadoop.io.compress.lz4.Lz4Compressor");
-    unloadableClasses.add("org.apache.hadoop.record.compiler.ant.RccTask");
-
-  }
-
-  public static void main(String[] args) {
-    Options options = new Options();
-
-    options.addOption("c", "compare", true,
-        "Compare against a spec, argument is the json file containing spec");
-    options.addOption("h", "help", false, "You're looking at it");
-    options.addOption("j", "jar", true, "Jar to examine");
-    options.addOption("p", "prepare-spec", true,
-        "Prepare the spec, argument is the directory to write the spec to");
-
-    try {
-      CommandLine cli = new GnuParser().parse(options, args);
-
-      if (cli.hasOption('h')) {
-        usage(options);
-        return;
-      }
-
-      if ((!cli.hasOption('c') && !cli.hasOption('p')) ||
-          (cli.hasOption('c') && cli.hasOption('p'))) {
-        System.err.println("You must choose either -c or -p");
-        usage(options);
-        return;
-      }
-
-      if (!cli.hasOption('j')) {
-        System.err.println("You must specify the jar to prepare or compare");
-        usage(options);
-        return;
-      }
-
-      String jar = cli.getOptionValue('j');
-      ApiExaminer examiner = new ApiExaminer();
-
-      if (cli.hasOption('c')) {
-        examiner.compareAgainstStandard(cli.getOptionValue('c'), jar);
-      } else if (cli.hasOption('p')) {
-        examiner.prepareExpected(jar, cli.getOptionValue('p'));
-      }
-    } catch (Exception e) {
-      System.err.println("Received exception while processing");
-      e.printStackTrace();
-    }
-  }
-
-  private static void usage(Options options) {
-    HelpFormatter help = new HelpFormatter();
-    help.printHelp("api-examiner", options);
-
-  }
-
-  private ApiExaminer() {
-  }
-
-  private void prepareExpected(String jarFile, String outputDir) throws IOException,
-      ClassNotFoundException {
-    JarInfo jarInfo = new JarInfo(jarFile, this);
-    jarInfo.dumpToFile(new File(outputDir));
-  }
-
-  private void compareAgainstStandard(String json, String jarFile) throws IOException,
-      ClassNotFoundException {
-    errors = new ArrayList<>();
-    warnings = new ArrayList<>();
-    JarInfo underTest = new JarInfo(jarFile, this);
-    JarInfo standard = jarInfoFromFile(new File(json));
-    standard.compareAndReport(underTest);
-
-    if (errors.size() > 0) {
-      System.err.println("Found " + errors.size() + " incompatibilities:");
-      for (String error : errors) {
-        System.err.println(error);
-      }
-    }
+    private static final Log LOG = LogFactory.getLog(ApiExaminer.class.getName());
+
+    static private Set<String> unloadableClasses;
+
+    private List<String> errors;
+    private List<String> warnings;
+
+    static {
+        unloadableClasses = new HashSet<>();
+        unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsMapping");
+        unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping");
+        unloadableClasses.add("org.apache.hadoop.io.compress.lz4.Lz4Compressor");
+        unloadableClasses.add("org.apache.hadoop.record.compiler.ant.RccTask");
 
-    if (warnings.size() > 0) {
-      System.err.println("Found " + warnings.size() + " possible issues: ");
-      for (String warning : warnings) {
-        System.err.println(warning);
-      }
     }
 
+    public static void main(String[] args) {
+        Options options = new Options();
+
+        options.addOption("c", "compare", true,
+                "Compare against a spec, argument is the json file containing spec");
+        options.addOption("h", "help", false, "You're looking at it");
+        options.addOption("j", "jar", true, "Jar to examine");
+        options.addOption("p", "prepare-spec", true,
+                "Prepare the spec, argument is the directory to write the spec to");
 
-  }
+        try {
+            CommandLine cli = new GnuParser().parse(options, args);
 
-  private JarInfo jarInfoFromFile(File inputFile) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    JarInfo jarInfo = mapper.readValue(inputFile, JarInfo.class);
-    jarInfo.patchUpClassBackPointers(this);
-    return jarInfo;
-  }
+            if (cli.hasOption('h')) {
+                usage(options);
+                return;
+            }
 
-  private static class JarInfo {
-    String name;
-    String version;
-    ApiExaminer container;
-    Map<String, ClassInfo> classes;
+            if ((!cli.hasOption('c') && !cli.hasOption('p')) ||
+                    (cli.hasOption('c') && cli.hasOption('p'))) {
+                System.err.println("You must choose either -c or -p");
+                usage(options);
+                return;
+            }
 
-    // For use by Jackson
-    public JarInfo() {
+            if (!cli.hasOption('j')) {
+                System.err.println("You must specify the jar to prepare or compare");
+                usage(options);
+                return;
+            }
 
-    }
+            String jar = cli.getOptionValue('j');
+            ApiExaminer examiner = new ApiExaminer();
 
-    JarInfo(String jarFile, ApiExaminer container) throws IOException, ClassNotFoundException {
-      this.container = container;
-      LOG.info("Processing jar " + jarFile);
-      File f = new File(jarFile);
-      Pattern pattern = Pattern.compile("(hadoop-[a-z\\-]+)-([0-9]\\.[0-9]\\.[0-9]).*");
-      Matcher matcher = pattern.matcher(f.getName());
-      if (!matcher.matches()) {
-        String msg = "Unable to determine name and version from " + f.getName();
-        LOG.error(msg);
-        throw new RuntimeException(msg);
-      }
-      name = matcher.group(1);
-      version = matcher.group(2);
-      classes = new HashMap<>();
-
-      JarFile jar = new JarFile(jarFile);
-      Enumeration<JarEntry> entries = jar.entries();
-      while (entries.hasMoreElements()) {
-        String name = entries.nextElement().getName();
-        if (name.endsWith(".class")) {
-          name = name.substring(0, name.length() - 6);
-          name = name.replace('/', '.');
-          if (!unloadableClasses.contains(name)) {
-            LOG.debug("Processing class " + name);
-            Class<?> clazz = Class.forName(name);
-            if (clazz.getAnnotation(InterfaceAudience.Public.class) != null &&
-                clazz.getAnnotation(InterfaceStability.Stable.class) != null) {
-              classes.put(name, new ClassInfo(this, clazz));
+            if (cli.hasOption('c')) {
+                examiner.compareAgainstStandard(cli.getOptionValue('c'), jar);
+            } else if (cli.hasOption('p')) {
+                examiner.prepareExpected(jar, cli.getOptionValue('p'));
             }
-          }
+        } catch (Exception e) {
+            System.err.println("Received exception while processing");
+            e.printStackTrace();
         }
-      }
     }
 
-    public String getName() {
-      return name;
-    }
+    private static void usage(Options options) {
+        HelpFormatter help = new HelpFormatter();
+        help.printHelp("api-examiner", options);
 
-    public void setName(String name) {
-      this.name = name;
     }
 
-    public String getVersion() {
-      return version;
+    private ApiExaminer() {
     }
 
-    public void setVersion(String version) {
-      this.version = version;
+    private void prepareExpected(String jarFile, String outputDir) throws IOException,
+            ClassNotFoundException {
+        JarInfo jarInfo = new JarInfo(jarFile, this);
+        jarInfo.dumpToFile(new File(outputDir));
     }
 
-    public Map<String, ClassInfo> getClasses() {
-      return classes;
+    private void compareAgainstStandard(String json, String jarFile) throws IOException,
+            ClassNotFoundException {
+        errors = new ArrayList<>();
+        warnings = new ArrayList<>();
+        JarInfo underTest = new JarInfo(jarFile, this);
+        JarInfo standard = jarInfoFromFile(new File(json));
+        standard.compareAndReport(underTest);
+
+        if (errors.size() > 0) {
+            System.err.println("Found " + errors.size() + " incompatibilities:");
+            for (String error : errors) {
+                System.err.println(error);
+            }
+        }
+
+        if (warnings.size() > 0) {
+            System.err.println("Found " + warnings.size() + " possible issues: ");
+            for (String warning : warnings) {
+                System.err.println(warning);
+            }
+        }
+
+
     }
 
-    public void setClasses(Map<String, ClassInfo> classes) {
-      this.classes = classes;
+    private JarInfo jarInfoFromFile(File inputFile) throws IOException {
+        ObjectMapper mapper = new ObjectMapper();
+        JarInfo jarInfo = mapper.readValue(inputFile, JarInfo.class);
+        jarInfo.patchUpClassBackPointers(this);
+        return jarInfo;
     }
 
-    void compareAndReport(JarInfo underTest) {
-      Set<ClassInfo> underTestClasses = new HashSet<>(underTest.classes.values());
-      for (ClassInfo classInfo : classes.values()) {
-        if (underTestClasses.contains(classInfo)) {
-          classInfo.compareAndReport(underTest.classes.get(classInfo.name));
-          underTestClasses.remove(classInfo);
-        } else {
-          container.errors.add(underTest + " does not contain class " + classInfo);
+    private static class JarInfo {
+        String name;
+        String version;
+        ApiExaminer container;
+        Map<String, ClassInfo> classes;
+
+        // For use by Jackson
+        public JarInfo() {
+
         }
-      }
 
-      if (underTestClasses.size() > 0) {
-        for (ClassInfo extra : underTestClasses) {
-          container.warnings.add(underTest + " contains extra class " + extra);
+        JarInfo(String jarFile, ApiExaminer container) throws IOException, ClassNotFoundException {
+            this.container = container;
+            LOG.info("Processing jar " + jarFile);
+            File f = new File(jarFile);
+            Pattern pattern = Pattern.compile("(hadoop-[a-z\\-]+)-([0-9]\\.[0-9]\\.[0-9]).*");
+            Matcher matcher = pattern.matcher(f.getName());
+            if (!matcher.matches()) {
+                String msg = "Unable to determine name and version from " + f.getName();
+                LOG.error(msg);
+                throw new RuntimeException(msg);
+            }
+            name = matcher.group(1);
+            version = matcher.group(2);
+            classes = new HashMap<>();
+
+            JarFile jar = new JarFile(jarFile);
+            Enumeration<JarEntry> entries = jar.entries();
+            while (entries.hasMoreElements()) {
+                String name = entries.nextElement().getName();
+                if (name.endsWith(".class")) {
+                    name = name.substring(0, name.length() - 6);
+                    name = name.replace('/', '.');
+                    if (!unloadableClasses.contains(name)) {
+                        LOG.debug("Processing class " + name);
+                        Class<?> clazz = Class.forName(name);
+                        if (clazz.getAnnotation(InterfaceAudience.Public.class) != null &&
+                                clazz.getAnnotation(InterfaceStability.Stable.class) != null) {
+                            classes.put(name, new ClassInfo(this, clazz));
+                        }
+                    }
+                }
+            }
         }
-      }
-    }
 
-    void dumpToFile(File outputDir) throws IOException {
-      File output = new File(outputDir, name + "-" + version + "-api-report.json");
-      ObjectMapper mapper = new ObjectMapper();
-      mapper.writeValue(output, this);
-    }
+        public String getName() {
+            return name;
+        }
 
-    void patchUpClassBackPointers(ApiExaminer container) {
-      this.container = container;
-      for (ClassInfo classInfo : classes.values()) {
-        classInfo.setJar(this);
-        classInfo.patchUpBackMethodBackPointers();
-      }
-    }
+        public void setName(String name) {
+            this.name = name;
+        }
 
-    @Override
-    public boolean equals(Object other) {
-      if (!(other instanceof JarInfo)) return false;
-      JarInfo that = (JarInfo)other;
-      return name.equals(that.name) && version.equals(that.version);
-    }
+        public String getVersion() {
+            return version;
+        }
 
-    @Override
-    public String toString() {
-      return name + "-" + version;
-    }
-  }
+        public void setVersion(String version) {
+            this.version = version;
+        }
 
-  private static class ClassInfo {
-    @JsonIgnore JarInfo jar;
-    String name;
-    Map<String, MethodInfo> methods;
+        public Map<String, ClassInfo> getClasses() {
+            return classes;
+        }
 
-    // For use by Jackson
-    public ClassInfo() {
+        public void setClasses(Map<String, ClassInfo> classes) {
+            this.classes = classes;
+        }
 
-    }
+        void compareAndReport(JarInfo underTest) {
+            Set<ClassInfo> underTestClasses = new HashSet<>(underTest.classes.values());
+            for (ClassInfo classInfo : classes.values()) {
+                if (underTestClasses.contains(classInfo)) {
+                    classInfo.compareAndReport(underTest.classes.get(classInfo.name));
+                    underTestClasses.remove(classInfo);
+                } else {
+                    container.errors.add(underTest + " does not contain class " + classInfo);
+                }
+            }
 
-    ClassInfo(JarInfo jar, Class<?> clazz) {
-      this.jar = jar;
-      this.name = clazz.getName();
-      methods = new HashMap<>();
+            if (underTestClasses.size() > 0) {
+                for (ClassInfo extra : underTestClasses) {
+                    container.warnings.add(underTest + " contains extra class " + extra);
+                }
+            }
+        }
 
-      for (Method method : clazz.getMethods()) {
-        if (method.getDeclaringClass().equals(clazz)) {
-          LOG.debug("Processing method " + method.getName());
-          MethodInfo mi = new MethodInfo(this, method);
-          methods.put(mi.toString(), mi);
+        void dumpToFile(File outputDir) throws IOException {
+            File output = new File(outputDir, name + "-" + version + "-api-report.json");
+            ObjectMapper mapper = new ObjectMapper();
+            mapper.writeValue(output, this);
         }
-      }
-    }
 
-    public JarInfo getJar() {
-      return jar;
-    }
+        void patchUpClassBackPointers(ApiExaminer container) {
+            this.container = container;
+            for (ClassInfo classInfo : classes.values()) {
+                classInfo.setJar(this);
+                classInfo.patchUpBackMethodBackPointers();
+            }
+        }
 
-    public void setJar(JarInfo jar) {
-      this.jar = jar;
-    }
+        @Override
+        public boolean equals(Object other) {
+            if (!(other instanceof JarInfo)) return false;
+            JarInfo that = (JarInfo) other;
+            return name.equals(that.name) && version.equals(that.version);
+        }
 
-    public String getName() {
-      return name;
+        @Override
+        public String toString() {
+            return name + "-" + version;
+        }
     }
 
-    public void setName(String name) {
-      this.name = name;
-    }
+    private static class ClassInfo {
+        @JsonIgnore
+        JarInfo jar;
+        String name;
+        Map<String, MethodInfo> methods;
 
-    public Map<String, MethodInfo> getMethods() {
-      return methods;
-    }
+        // For use by Jackson
+        public ClassInfo() {
 
-    public void setMethods(Map<String, MethodInfo> methods) {
-      this.methods = methods;
-    }
+        }
 
-    void compareAndReport(ClassInfo underTest) {
-      // Make a copy so we can remove them as we match them, making it easy to find additional ones
-      Set<MethodInfo> underTestMethods = new HashSet<>(underTest.methods.values());
-      for (MethodInfo methodInfo : methods.values()) {
-        if (underTestMethods.contains(methodInfo)) {
-          methodInfo.compareAndReport(underTest.methods.get(methodInfo.toString()));
-          underTestMethods.remove(methodInfo);
-        } else {
-          jar.container.errors.add(underTest + " does not contain method " + methodInfo);
+        ClassInfo(JarInfo jar, Class<?> clazz) {
+            this.jar = jar;
+            this.name = clazz.getName();
+            methods = new HashMap<>();
+
+            for (Method method : clazz.getMethods()) {
+                if (method.getDeclaringClass().equals(clazz)) {
+                    LOG.debug("Processing method " + method.getName());
+                    MethodInfo mi = new MethodInfo(this, method);
+                    methods.put(mi.toString(), mi);
+                }
+            }
         }
-      }
 
-      if (underTestMethods.size() > 0) {
-        for (MethodInfo extra : underTestMethods) {
-          jar.container.warnings.add(underTest + " contains extra method " + extra);
+        public JarInfo getJar() {
+            return jar;
         }
-      }
-    }
 
-    void patchUpBackMethodBackPointers() {
-      for (MethodInfo methodInfo : methods.values()) methodInfo.setContainingClass(this);
-    }
+        public void setJar(JarInfo jar) {
+            this.jar = jar;
+        }
 
-    @Override
-    public boolean equals(Object other) {
-      if (!(other instanceof ClassInfo)) return false;
-      ClassInfo that = (ClassInfo)other;
-      return name.equals(that.name);  // Classes can be compared just on names
-    }
+        public String getName() {
+            return name;
+        }
 
-    @Override
-    public int hashCode() {
-      return name.hashCode();
-    }
+        public void setName(String name) {
+            this.name = name;
+        }
 
-    @Override
-    public String toString() {
-      return jar + " " + name;
-    }
-  }
+        public Map<String, MethodInfo> getMethods() {
+            return methods;
+        }
 
-  private static class MethodInfo {
-    @JsonIgnore ClassInfo containingClass;
-    String name;
-    String returnType;
-    List<String> args;
-    Set<String> exceptions;
+        public void setMethods(Map<String, MethodInfo> methods) {
+            this.methods = methods;
+        }
 
-    // For use by Jackson
-    public MethodInfo() {
+        void compareAndReport(ClassInfo underTest) {
+            // Make a copy so we can remove them as we match them, making it easy to find additional ones
+            Set<MethodInfo> underTestMethods = new HashSet<>(underTest.methods.values());
+            for (MethodInfo methodInfo : methods.values()) {
+                if (underTestMethods.contains(methodInfo)) {
+                    methodInfo.compareAndReport(underTest.methods.get(methodInfo.toString()));
+                    underTestMethods.remove(methodInfo);
+                } else {
+                    jar.container.errors.add(underTest + " does not contain method " + methodInfo);
+                }
+            }
 
-    }
+            if (underTestMethods.size() > 0) {
+                for (MethodInfo extra : underTestMethods) {
+                    jar.container.warnings.add(underTest + " contains extra method " + extra);
+                }
+            }
+        }
 
-    MethodInfo(ClassInfo containingClass, Method method) {
-      this.containingClass = containingClass;
-      this.name = method.getName();
-      args = new ArrayList<>();
-      for (Class<?> argClass : method.getParameterTypes()) {
-        args.add(argClass.getName());
-      }
-      returnType = method.getReturnType().getName();
-      exceptions = new HashSet<>();
-      for (Class<?> exception : method.getExceptionTypes()) {
-        exceptions.add(exception.getName());
-      }
-    }
+        void patchUpBackMethodBackPointers() {
+            for (MethodInfo methodInfo : methods.values()) methodInfo.setContainingClass(this);
+        }
 
-    public ClassInfo getContainingClass() {
-      return containingClass;
-    }
+        @Override
+        public boolean equals(Object other) {
+            if (!(other instanceof ClassInfo)) return false;
+            ClassInfo that = (ClassInfo) other;
+            return name.equals(that.name);  // Classes can be compared just on names
+        }
 
-    public void setContainingClass(ClassInfo containingClass) {
-      this.containingClass = containingClass;
-    }
+        @Override
+        public int hashCode() {
+            return name.hashCode();
+        }
 
-    public String getName() {
-      return name;
+        @Override
+        public String toString() {
+            return jar + " " + name;
+        }
     }
 
-    public void setName(String name) {
-      this.name = name;
-    }
+    private static class MethodInfo {
+        @JsonIgnore
+        ClassInfo containingClass;
+        String name;
+        String returnType;
+        List<String> args;
+        Set<String> exceptions;
 
-    public String getReturnType() {
-      return returnType;
-    }
+        // For use by Jackson
+        public MethodInfo() {
 
-    public void setReturnType(String returnType) {
-      this.returnType = returnType;
-    }
+        }
 
-    public List<String> getArgs() {
-      return args;
-    }
+        MethodInfo(ClassInfo containingClass, Method method) {
+            this.containingClass = containingClass;
+            this.name = method.getName();
+            args = new ArrayList<>();
+            for (Class<?> argClass : method.getParameterTypes()) {
+                args.add(argClass.getName());
+            }
+            returnType = method.getReturnType().getName();
+            exceptions = new HashSet<>();
+            for (Class<?> exception : method.getExceptionTypes()) {
+                exceptions.add(exception.getName());
+            }
+        }
 
-    public void setArgs(List<String> args) {
-      this.args = args;
-    }
+        public ClassInfo getContainingClass() {
+            return containingClass;
+        }
 
-    public Set<String> getExceptions() {
-      return exceptions;
-    }
+        public void setContainingClass(ClassInfo containingClass) {
+            this.containingClass = containingClass;
+        }
 
-    public void setExceptions(Set<String> exceptions) {
-      this.exceptions = exceptions;
-    }
+        public String getName() {
+            return name;
+        }
 
-    void compareAndReport(MethodInfo underTest) {
-      // Check to see if they've added or removed exceptions
-      // Make a copy so I can remove them as I check them off and easily find any that have been
-      // added.
-      Set<String> underTestExceptions = new HashSet<>(underTest.exceptions);
-      for (String exception : exceptions) {
-        if (underTest.exceptions.contains(exception)) {
-          underTestExceptions.remove(exception);
-        } else {
-          containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
-              underTest.containingClass + "." + name + " removes exception " + exception);
-        }
-      }
-      if (underTestExceptions.size() > 0) {
-        for (String underTestException : underTest.exceptions) {
-          containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
-              underTest.containingClass + "." + name + " adds exception " + underTestException);
-        }
-      }
-    }
+        public void setName(String name) {
+            this.name = name;
+        }
 
-    @Override
-    public boolean equals(Object other) {
-      if (!(other instanceof MethodInfo)) return false;
-      MethodInfo that = (MethodInfo)other;
+        public String getReturnType() {
+            return returnType;
+        }
 
-      return containingClass.equals(that.containingClass) && name.equals(that.name) &&
-          returnType.equals(that.returnType) && args.equals(that.args);
-    }
+        public void setReturnType(String returnType) {
+            this.returnType = returnType;
+        }
 
-    @Override
-    public int hashCode() {
-      return ((containingClass.hashCode() * 31 + name.hashCode()) * 31 + returnType.hashCode()) * 31 +
-          args.hashCode();
-    }
+        public List<String> getArgs() {
+            return args;
+        }
+
+        public void setArgs(List<String> args) {
+            this.args = args;
+        }
 
-    @Override
-    public String toString() {
-      StringBuilder buf = new StringBuilder(returnType)
-          .append(" ")
-          .append(name)
-          .append('(');
-      boolean first = true;
-      for (String arg : args) {
-        if (first) first = false;
-        else buf.append(", ");
-        buf.append(arg);
-      }
-      buf.append(")");
-      if (exceptions.size() > 0) {
-        buf.append(" throws ");
-        first = true;
-        for (String exception : exceptions) {
-          if (first) first = false;
-          else buf.append(", ");
-          buf.append(exception);
-        }
-      }
-      return buf.toString();
+        public Set<String> getExceptions() {
+            return exceptions;
+        }
+
+        public void setExceptions(Set<String> exceptions) {
+            this.exceptions = exceptions;
+        }
+
+        void compareAndReport(MethodInfo underTest) {
+            // Check to see if they've added or removed exceptions
+            // Make a copy so I can remove them as I check them off and easily find any that have been
+            // added.
+            Set<String> underTestExceptions = new HashSet<>(underTest.exceptions);
+            for (String exception : exceptions) {
+                if (underTest.exceptions.contains(exception)) {
+                    underTestExceptions.remove(exception);
+                } else {
+                    containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
+                            underTest.containingClass + "." + name + " removes exception " + exception);
+                }
+            }
+            if (underTestExceptions.size() > 0) {
+                for (String underTestException : underTest.exceptions) {
+                    containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
+                            underTest.containingClass + "." + name + " adds exception " + underTestException);
+                }
+            }
+        }
+
+        @Override
+        public boolean equals(Object other) {
+            if (!(other instanceof MethodInfo)) return false;
+            MethodInfo that = (MethodInfo) other;
+
+            return containingClass.equals(that.containingClass) && name.equals(that.name) &&
+                    returnType.equals(that.returnType) && args.equals(that.args);
+        }
+
+        @Override
+        public int hashCode() {
+            return ((containingClass.hashCode() * 31 + name.hashCode()) * 31 + returnType.hashCode()) * 31 +
+                    args.hashCode();
+        }
+
+        @Override
+        public String toString() {
+            StringBuilder buf = new StringBuilder(returnType)
+                    .append(" ")
+                    .append(name)
+                    .append('(');
+            boolean first = true;
+            for (String arg : args) {
+                if (first) first = false;
+                else buf.append(", ");
+                buf.append(arg);
+            }
+            buf.append(")");
+            if (exceptions.size() > 0) {
+                buf.append(" throws ");
+                first = true;
+                for (String exception : exceptions) {
+                    if (first) first = false;
+                    else buf.append(", ");
+                    buf.append(exception);
+                }
+            }
+            return buf.toString();
+        }
     }
-  }
 }
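
The compareAndReport() methods above (jar, class, and method level) all use the same set-difference idiom: copy the members found in the jar under test, check off each expected member as it is matched, report unmatched expected members as removals, and report whatever is left in the copy as additions. A minimal, self-contained sketch of that idiom follows; the member names, error list, and warning list are hypothetical stand-ins, not code from the patch.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class CompareSketch {
        public static void main(String[] args) {
            // Published API surface vs. the surface discovered in the jar under test.
            Set<String> expected = new HashSet<>(Arrays.asList("open()", "read()", "close()"));
            Set<String> underTest = new HashSet<>(Arrays.asList("open()", "read()", "seek()"));

            List<String> errors = new ArrayList<>();
            List<String> warnings = new ArrayList<>();

            // Work on a copy so matched members can be checked off as they are found;
            // anything still in the copy afterwards exists only in the jar under test.
            Set<String> remaining = new HashSet<>(underTest);
            for (String member : expected) {
                if (!remaining.remove(member)) {
                    errors.add("jar under test does not contain " + member);
                }
            }
            for (String extra : remaining) {
                warnings.add("jar under test contains extra member " + extra);
            }

            System.out.println("errors:   " + errors);   // [jar under test does not contain close()]
            System.out.println("warnings: " + warnings); // [jar under test contains extra member seek()]
        }
    }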

http://git-wip-us.apache.org/repos/asf/bigtop/blob/a05d3813/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
index 6456cf2..ccc15eb 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
@@ -41,97 +41,98 @@ import java.net.URI;
 import java.util.StringTokenizer;
 
 public class HCatalogMR extends Configured implements Tool {
-  private final static String INPUT_SCHEMA = "bigtop.test.hcat.schema.input";
-  private final static String OUTPUT_SCHEMA = "bigtop.test.hcat.schema.output";
-
-  @Override
-  public int run(String[] args) throws Exception {
-    String inputTable = null;
-    String outputTable = null;
-    String inputSchemaStr = null;
-    String outputSchemaStr = null;
-    for(int i = 0; i < args.length; i++){
-        if(args[i].equalsIgnoreCase("-it")){
-            inputTable = args[i+1];
-        }else if(args[i].equalsIgnoreCase("-ot")){
-            outputTable = args[i+1];
-        }else if(args[i].equalsIgnoreCase("-is")){
-            inputSchemaStr = args[i+1];
-        }else if(args[i].equalsIgnoreCase("-os")){
-            outputSchemaStr = args[i+1];
+    private final static String INPUT_SCHEMA = "bigtop.test.hcat.schema.input";
+    private final static String OUTPUT_SCHEMA = "bigtop.test.hcat.schema.output";
+
+    @Override
+    public int run(String[] args) throws Exception {
+        String inputTable = null;
+        String outputTable = null;
+        String inputSchemaStr = null;
+        String outputSchemaStr = null;
+        for (int i = 0; i < args.length; i++) {
+            if (args[i].equalsIgnoreCase("-it")) {
+                inputTable = args[i + 1];
+            } else if (args[i].equalsIgnoreCase("-ot")) {
+                outputTable = args[i + 1];
+            } else if (args[i].equalsIgnoreCase("-is")) {
+                inputSchemaStr = args[i + 1];
+            } else if (args[i].equalsIgnoreCase("-os")) {
+                outputSchemaStr = args[i + 1];
+            }
         }
+
+        Configuration conf = getConf();
+        args = new GenericOptionsParser(conf, args).getRemainingArgs();
+
+        conf.set(INPUT_SCHEMA, inputSchemaStr);
+        conf.set(OUTPUT_SCHEMA, outputSchemaStr);
+
+        Job job = new Job(conf, "bigtop_hcat_test");
+        HCatInputFormat.setInput(job, "default", inputTable);
+
+        job.setInputFormatClass(HCatInputFormat.class);
+        job.setJarByClass(HCatalogMR.class);
+        job.setMapperClass(Map.class);
+        job.setReducerClass(Reduce.class);
+        job.setMapOutputKeyClass(Text.class);
+        job.setMapOutputValueClass(IntWritable.class);
+        job.setOutputKeyClass(WritableComparable.class);
+        job.setOutputValueClass(HCatRecord.class);
+        HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", outputTable, null));
+        HCatOutputFormat.setSchema(job, HCatSchemaUtils.getHCatSchema(outputSchemaStr));
+        job.setOutputFormatClass(HCatOutputFormat.class);
+
+        return job.waitForCompletion(true) ? 0 : 1;
+
+
     }
-    
-    Configuration conf = getConf();
-    args = new GenericOptionsParser(conf, args).getRemainingArgs();
-
-    conf.set(INPUT_SCHEMA, inputSchemaStr);
-    conf.set(OUTPUT_SCHEMA, outputSchemaStr);
-
-    Job job = new Job(conf, "bigtop_hcat_test");
-    HCatInputFormat.setInput(job, "default", inputTable);
-
-    job.setInputFormatClass(HCatInputFormat.class);
-    job.setJarByClass(HCatalogMR.class);
-    job.setMapperClass(Map.class);
-    job.setReducerClass(Reduce.class);
-    job.setMapOutputKeyClass(Text.class);
-    job.setMapOutputValueClass(IntWritable.class);
-    job.setOutputKeyClass(WritableComparable.class);
-    job.setOutputValueClass(HCatRecord.class);
-    HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", outputTable, null));
-    HCatOutputFormat.setSchema(job, HCatSchemaUtils.getHCatSchema(outputSchemaStr));
-    job.setOutputFormatClass(HCatOutputFormat.class);
-
-    return job.waitForCompletion(true) ? 0 : 1;
-
-
-  }
-  public static class Map extends Mapper<WritableComparable,
-          HCatRecord, Text, IntWritable> {
-    private final static IntWritable one = new IntWritable(1);
-    private Text word = new Text();
-    private HCatSchema inputSchema = null;
 
-    @Override
-    protected void map(WritableComparable key, HCatRecord value, Context context)
-        throws IOException, InterruptedException {
-      if (inputSchema == null) {
-        inputSchema =
-            HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(INPUT_SCHEMA));
-      }
-      String line = value.getString("line", inputSchema);
-      StringTokenizer tokenizer = new StringTokenizer(line);
-      while (tokenizer.hasMoreTokens()) {
-        word.set(tokenizer.nextToken());
-        context.write(word, one);
-      }
+    public static class Map extends Mapper<WritableComparable,
+            HCatRecord, Text, IntWritable> {
+        private final static IntWritable one = new IntWritable(1);
+        private Text word = new Text();
+        private HCatSchema inputSchema = null;
+
+        @Override
+        protected void map(WritableComparable key, HCatRecord value, Context context)
+                throws IOException, InterruptedException {
+            if (inputSchema == null) {
+                inputSchema =
+                        HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(INPUT_SCHEMA));
+            }
+            String line = value.getString("line", inputSchema);
+            StringTokenizer tokenizer = new StringTokenizer(line);
+            while (tokenizer.hasMoreTokens()) {
+                word.set(tokenizer.nextToken());
+                context.write(word, one);
+            }
+        }
     }
-  }
 
-  public static class Reduce extends Reducer<Text, IntWritable, WritableComparable, HCatRecord> {
-    private HCatSchema outputSchema = null;
+    public static class Reduce extends Reducer<Text, IntWritable, WritableComparable, HCatRecord> {
+        private HCatSchema outputSchema = null;
 
-    @Override
-    protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws
-        IOException, InterruptedException {
-      if (outputSchema == null) {
-        outputSchema =
-            HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(OUTPUT_SCHEMA));
-      }
-      int sum = 0;
-      for (IntWritable i : values) {
-        sum += i.get();
-      }
-      HCatRecord output = new DefaultHCatRecord(2);
-      output.set("word", outputSchema, key);
-      output.set("count", outputSchema, sum);
-      context.write(null, output);
+        @Override
+        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws
+                IOException, InterruptedException {
+            if (outputSchema == null) {
+                outputSchema =
+                        HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(OUTPUT_SCHEMA));
+            }
+            int sum = 0;
+            for (IntWritable i : values) {
+                sum += i.get();
+            }
+            HCatRecord output = new DefaultHCatRecord(2);
+            output.set("word", outputSchema, key);
+            output.set("count", outputSchema, sum);
+            context.write(null, output);
+        }
     }
-  }
 
-  public static void main(String[] args) throws Exception {
-    int exitCode = ToolRunner.run(new HCatalogMR(), args);
-    System.exit(exitCode);
-  }
- }
+    public static void main(String[] args) throws Exception {
+        int exitCode = ToolRunner.run(new HCatalogMR(), args);
+        System.exit(exitCode);
+    }
+}
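
HCatalogMR is a Hadoop Tool: it reads a single-column HCatalog table of text lines, word-counts them in the mapper/reducer pair above, and writes (word, count) records to a second HCatalog table. A launch might look like the sketch below, assuming the driver class sits alongside HCatalogMR on the classpath; the table names and schema strings are illustrative assumptions, since the real values are supplied by the test harness at run time.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.util.ToolRunner;

    public class RunHCatalogMR {
        public static void main(String[] args) throws Exception {
            // All four values below are assumed for illustration only.
            String[] jobArgs = {
                    "-it", "bigtop_hcat_input",      // input table
                    "-ot", "bigtop_hcat_output",     // output table
                    "-is", "line:string",            // input schema string
                    "-os", "word:string,count:int"   // output schema string
            };
            int rc = ToolRunner.run(new Configuration(), new HCatalogMR(), jobArgs);
            System.exit(rc);
        }
    }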

http://git-wip-us.apache.org/repos/asf/bigtop/blob/a05d3813/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
index ee20588..f722d63 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
@@ -34,88 +34,88 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
 public class HiveHelper {
-	
-	private static final Log LOG = LogFactory.getLog(HiveHelper.class.getName());
 
-	public static Map<String, String> execCommand(CommandLine commandline) {
-		return execCommand(commandline, null);
-	}
+    private static final Log LOG = LogFactory.getLog(HiveHelper.class.getName());
 
-	public static Map<String, String> execCommand(CommandLine commandline,
-																								Map<String, String> envVars) {
-		
-		System.out.println("Executing command:");
-		System.out.println(commandline.toString());
-		Map<String, String> env = null;
-		Map<String, String> entry = new HashMap<String, String>();
-		try {
-			env = EnvironmentUtils.getProcEnvironment();
-		} catch (IOException e1) {
-			// TODO Auto-generated catch block
-			LOG.debug("Failed to get process environment: "+ e1.getMessage());
-			e1.printStackTrace();
-		}
-		if (envVars != null) {
-			for (String key : envVars.keySet()) {
-				env.put(key, envVars.get(key));
-			}
-		}
+    public static Map<String, String> execCommand(CommandLine commandline) {
+        return execCommand(commandline, null);
+    }
 
-		DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
-		ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
-		PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream);
-		ExecuteWatchdog watchdog = new ExecuteWatchdog(60*10000);
-		Executor executor = new DefaultExecutor();
-		executor.setExitValue(1);
-		executor.setWatchdog(watchdog);
-		executor.setStreamHandler(streamHandler);
-		try {
-			executor.execute(commandline, env, resultHandler);
-		} catch (ExecuteException e) {
-			// TODO Auto-generated catch block
-			LOG.debug("Failed to execute command with exit value: "+ String.valueOf(resultHandler.getExitValue()));
-			LOG.debug("outputStream: "+ outputStream.toString());
-			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
-			entry.put("outputStream", outputStream.toString() + e.getMessage());
-			e.printStackTrace();
-			return entry;
-		} catch (IOException e) {
-			// TODO Auto-generated catch block
-			LOG.debug("Failed to execute command with exit value: "+ String.valueOf(resultHandler.getExitValue()));
-			LOG.debug("outputStream: "+ outputStream.toString());
-			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
-			entry.put("outputStream", outputStream.toString() + e.getMessage());
-			e.printStackTrace();
-			return entry;
-		}
-		
-		try {
-			resultHandler.waitFor();
-			/*System.out.println("Command output: "+outputStream.toString());*/
-			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
-			entry.put("outputStream", outputStream.toString());
-			return entry;
-		} catch (InterruptedException e) {
-			// TODO Auto-generated catch block
+    public static Map<String, String> execCommand(CommandLine commandline,
+                                                  Map<String, String> envVars) {
+
+        System.out.println("Executing command:");
+        System.out.println(commandline.toString());
+        Map<String, String> env = null;
+        Map<String, String> entry = new HashMap<String, String>();
+        try {
+            env = EnvironmentUtils.getProcEnvironment();
+        } catch (IOException e1) {
+            // TODO Auto-generated catch block
+            LOG.debug("Failed to get process environment: " + e1.getMessage());
+            e1.printStackTrace();
+        }
+        if (envVars != null) {
+            for (String key : envVars.keySet()) {
+                env.put(key, envVars.get(key));
+            }
+        }
+
+        DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
+        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+        PumpStreamHandler streamHandler = new PumpStreamHandler(outputStream);
+        ExecuteWatchdog watchdog = new ExecuteWatchdog(60 * 10000);
+        Executor executor = new DefaultExecutor();
+        executor.setExitValue(1);
+        executor.setWatchdog(watchdog);
+        executor.setStreamHandler(streamHandler);
+        try {
+            executor.execute(commandline, env, resultHandler);
+        } catch (ExecuteException e) {
+            // TODO Auto-generated catch block
+            LOG.debug("Failed to execute command with exit value: " + String.valueOf(resultHandler.getExitValue()));
+            LOG.debug("outputStream: " + outputStream.toString());
+            entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+            entry.put("outputStream", outputStream.toString() + e.getMessage());
+            e.printStackTrace();
+            return entry;
+        } catch (IOException e) {
+            // TODO Auto-generated catch block
+            LOG.debug("Failed to execute command with exit value: " + String.valueOf(resultHandler.getExitValue()));
+            LOG.debug("outputStream: " + outputStream.toString());
+            entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+            entry.put("outputStream", outputStream.toString() + e.getMessage());
+            e.printStackTrace();
+            return entry;
+        }
+
+        try {
+            resultHandler.waitFor();
+            /*System.out.println("Command output: "+outputStream.toString());*/
+            entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+            entry.put("outputStream", outputStream.toString());
+            return entry;
+        } catch (InterruptedException e) {
+            // TODO Auto-generated catch block
 			/*System.out.println("Command output: "+outputStream.toString());*/
-			LOG.debug("exitValue: "+ String.valueOf(resultHandler.getExitValue()));
-			LOG.debug("outputStream: "+ outputStream.toString());
-			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
-			entry.put("outputStream", outputStream.toString());
-			e.printStackTrace();		
-			return entry;
-		}
-	}
-	
-	protected static String getProperty(String property, String description) {
-		String val = System.getProperty(property);
-		if (val == null) {
-			throw new RuntimeException("You must set the property " + property + " with " +
-				description);
-		}
-		LOG.debug(description + " is " + val);
-		return val;
-	 }
-	
+            LOG.debug("exitValue: " + String.valueOf(resultHandler.getExitValue()));
+            LOG.debug("outputStream: " + outputStream.toString());
+            entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+            entry.put("outputStream", outputStream.toString());
+            e.printStackTrace();
+            return entry;
+        }
+    }
+
+    protected static String getProperty(String property, String description) {
+        String val = System.getProperty(property);
+        if (val == null) {
+            throw new RuntimeException("You must set the property " + property + " with " +
+                    description);
+        }
+        LOG.debug(description + " is " + val);
+        return val;
+    }
+
 
 }
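
HiveHelper.execCommand() wraps commons-exec: it launches the command under a ten-minute watchdog, pumps stdout/stderr into a byte buffer, and always returns a two-entry map with the exit value and the captured output, even when execution fails. A hypothetical caller, assumed to sit in the same package as HiveHelper, would use it like this:

    import java.util.Map;

    import org.apache.commons.exec.CommandLine;

    public class ExecCommandExample {
        public static void main(String[] args) {
            // "hive --version" is just an illustrative command; any CommandLine works.
            CommandLine cmd = new CommandLine("hive").addArgument("--version");
            Map<String, String> result = HiveHelper.execCommand(cmd);
            System.out.println("exit value: " + result.get("exitValue"));
            System.out.println("output:     " + result.get("outputStream"));
        }
    }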

http://git-wip-us.apache.org/repos/asf/bigtop/blob/a05d3813/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
index 3b3ac51..35b9a3a 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
@@ -28,52 +28,52 @@ import java.sql.SQLException;
 import java.util.Properties;
 
 public class JdbcConnector {
-  private static final Log LOG = LogFactory.getLog(JdbcConnector.class.getName());
+    private static final Log LOG = LogFactory.getLog(JdbcConnector.class.getName());
 
-  protected static final String URL = "bigtop.test.hive.jdbc.url";
-  protected static final String USER = "bigtop.test.hive.jdbc.user";
-  protected static final String PASSWD = "bigtop.test.hive.jdbc.password";
-  protected static final String LOCATION = "bigtop.test.hive.location";
-  protected static final String METASTORE_URL = "bigtop.test.hive.metastore.url";
-  protected static final String TEST_THRIFT = "bigtop.test.hive.thrift.test";
-  protected static final String TEST_HCATALOG = "bigtop.test.hive.hcatalog.test";
-  protected static final String HIVE_CONF_DIR = "bigtop.test.hive.conf.dir";
-  protected static final String HADOOP_CONF_DIR = "bigtop.test.hadoop.conf.dir";
+    protected static final String URL = "bigtop.test.hive.jdbc.url";
+    protected static final String USER = "bigtop.test.hive.jdbc.user";
+    protected static final String PASSWD = "bigtop.test.hive.jdbc.password";
+    protected static final String LOCATION = "bigtop.test.hive.location";
+    protected static final String METASTORE_URL = "bigtop.test.hive.metastore.url";
+    protected static final String TEST_THRIFT = "bigtop.test.hive.thrift.test";
+    protected static final String TEST_HCATALOG = "bigtop.test.hive.hcatalog.test";
+    protected static final String HIVE_CONF_DIR = "bigtop.test.hive.conf.dir";
+    protected static final String HADOOP_CONF_DIR = "bigtop.test.hadoop.conf.dir";
 
-  protected static Connection conn;
+    protected static Connection conn;
 
-  @BeforeClass
-  public static void connectToJdbc() throws SQLException {
-    // Assume they've put the URL for the JDBC driver in an environment variable.
-    String jdbcUrl = getProperty(URL, "the JDBC URL");
-    String jdbcUser = getProperty(USER, "the JDBC user name");
-    String jdbcPasswd = getProperty(PASSWD, "the JDBC password");
+    @BeforeClass
+    public static void connectToJdbc() throws SQLException {
+        // Assume they've put the URL for the JDBC driver in an environment variable.
+        String jdbcUrl = getProperty(URL, "the JDBC URL");
+        String jdbcUser = getProperty(USER, "the JDBC user name");
+        String jdbcPasswd = getProperty(PASSWD, "the JDBC password");
 
-    Properties props = new Properties();
-    props.put("user", jdbcUser);
-    if (!jdbcPasswd.equals("")) props.put("password", jdbcPasswd);
-    conn = DriverManager.getConnection(jdbcUrl, props);
-  }
+        Properties props = new Properties();
+        props.put("user", jdbcUser);
+        if (!jdbcPasswd.equals("")) props.put("password", jdbcPasswd);
+        conn = DriverManager.getConnection(jdbcUrl, props);
+    }
 
-  @AfterClass
-  public static void closeJdbc() throws SQLException {
-    if (conn != null) conn.close();
-  }
+    @AfterClass
+    public static void closeJdbc() throws SQLException {
+        if (conn != null) conn.close();
+    }
 
-  protected static String getProperty(String property, String description) {
-    String val = System.getProperty(property);
-    if (val == null) {
-      throw new RuntimeException("You must set the property " + property + " with " +
-          description);
+    protected static String getProperty(String property, String description) {
+        String val = System.getProperty(property);
+        if (val == null) {
+            throw new RuntimeException("You must set the property " + property + " with " +
+                    description);
+        }
+        LOG.debug(description + " is " + val);
+        return val;
     }
-    LOG.debug(description + " is " + val);
-    return val;
-  }
 
-  protected static boolean testActive(String property, String description) {
-    String val = System.getProperty(property, "true");
-    LOG.debug(description + " is " + val);
-    return Boolean.valueOf(val);
-  }
+    protected static boolean testActive(String property, String description) {
+        String val = System.getProperty(property, "true");
+        LOG.debug(description + " is " + val);
+        return Boolean.valueOf(val);
+    }
 
 }
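
JdbcConnector is meant to be extended: its @BeforeClass method opens the shared static conn from the bigtop.test.hive.jdbc.* system properties before any subclass test runs, and @AfterClass closes it afterwards. A minimal hypothetical subclass test (not part of the patch) simply uses the inherited connection:

    import java.sql.SQLException;
    import java.sql.Statement;

    import org.junit.Test;

    // Assumed to live in the same package as JdbcConnector so it inherits the
    // connection set up in @BeforeClass and torn down in @AfterClass.
    public class TestSimpleJdbcQuery extends JdbcConnector {
        @Test
        public void showDatabases() throws SQLException {
            try (Statement stmt = conn.createStatement()) {
                stmt.execute("SHOW DATABASES");
            }
        }
    }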

http://git-wip-us.apache.org/repos/asf/bigtop/blob/a05d3813/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
index bc2ab77..85d824e 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
@@ -24,178 +24,204 @@ import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
+
 import java.io.FileNotFoundException;
 import java.io.PrintWriter;
 import java.util.Map;
 
 public class TestBeeline {
-	
-	public static final Log LOG = LogFactory.getLog(TestBeeline.class.getName());
-	
-	private static final String URL = "bigtop.test.hive.jdbc.url";
-	private static final String USER = "bigtop.test.hive.jdbc.user";
-	private static final String PASSWD = "bigtop.test.hive.jdbc.password";
-	
-	private static Map<String, String> results;
-	private static String beelineUrl; 
-	private static String beelineUser;
-	private static String beelinePasswd;
-	
-	//creating beeline base command with username and password as per inputs
-	private static CommandLine beelineBaseCommand = new CommandLine("beeline");
-
-	@BeforeClass
-	public static void initialSetup(){
-		TestBeeline.beelineUrl = System.getProperty(URL);
-		TestBeeline.beelineUser = System.getProperty(USER);
-		TestBeeline.beelinePasswd =System.getProperty(PASSWD);
-
-		if (beelineUser != null && beelineUser != "" && beelinePasswd != null && beelinePasswd != "") 
-		{ 
-			beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd);
-		}
-		else if (beelineUser != null && beelineUser != "") 
-		{ 
-			beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser);
-		}
-		else {
-			beelineBaseCommand.addArgument("-u").addArgument(beelineUrl);
-		}
-		LOG.info("URL is " + beelineUrl); 
-		LOG.info("User is " + beelineUser);
-		LOG.info("Passwd is " + beelinePasswd); 
-		LOG.info("Passwd is null " + (beelinePasswd == null));
-	}
-
-	@Test
-	public void checkBeeline() {
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline -u FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-	}
-	
-	@Test
-	public void checkBeelineConnect(){
-		try(PrintWriter out = new PrintWriter("connect.url")){ out.println("!connect " + beelineUrl+" "+beelineUser+" "+beelinePasswd); out.println("!quit"); } 
-		catch (FileNotFoundException e1) {
-			e1.printStackTrace();
-		}
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -f connect.url",false));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline !connect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception") );  
-	}
-	
-	@Test
-	public void checkBeelineHelp(){
-		results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("--help"));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline --help FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("display this message" ) && consoleMsg.contains("usage: java org.apache.hive.cli.beeline.beeline") && !consoleMsg.contains("exception"));
-	}
-	
-	@Test
-	public void checkBeelineQueryExecFromCmdLine(){
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
-		if(!results.get("outputStream").contains("bigtop_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive;"));
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive;"));
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive;"));
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
-		}
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline -e FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive"));
-	}
-	
-	@Test
-	public void checkBeelineQueryExecFromFile() throws FileNotFoundException{
-		
-		try(PrintWriter out = new PrintWriter("beeline-f1.sql")){ out.println("SHOW DATABASES;"); }
-		try(PrintWriter out = new PrintWriter("beeline-f2.sql")){ out.println("CREATE DATABASE bigtop_runtime_hive;"); }
-		try(PrintWriter out = new PrintWriter("beeline-f3.sql")){ out.println("DROP DATABASE bigtop_runtime_hive;"); out.println("CREATE DATABASE bigtop_runtime_hive;"); }
-		try(PrintWriter out = new PrintWriter("beeline-f4.sql")){ out.println("DROP DATABASE bigtop_runtime_hive;"); }
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false));
-		
-		if(!results.get("outputStream").contains("bigtop_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f2.sql",false));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f3.sql",false));
-		}
-		
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false));
-
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline -f FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f4.sql",false));	
-	}
-	
-	@Test
-	public void checkBeelineInitFile() throws FileNotFoundException{
-
-		try(PrintWriter out = new PrintWriter("beeline-i1.sql")){ out.println("SHOW DATABASES;"); }
-		try(PrintWriter out = new PrintWriter("beeline-i2.sql")){ out.println("CREATE DATABASE bigtop_runtime_beeline_init;"); }
-		try(PrintWriter out = new PrintWriter("beeline-i3.sql")){ out.println("DROP DATABASE bigtop_runtime_beeline_init;"); out.println("CREATE DATABASE bigtop_runtime_beeline_init;"); }
-		try(PrintWriter out = new PrintWriter("beeline-i4.sql")){ out.println("DROP DATABASE bigtop_runtime_beeline_init;"); }
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false));
-	
-		if(!results.get("outputStream").contains("bigtop_runtime_beeline_init")){
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i2.sql",false));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i3.sql",false));
-		}
-		
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline -i FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_beeline_init") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i4.sql",false));	
-	}
-	
-	@Test
-	public void checkBeelineHiveVar() throws FileNotFoundException{
-
-		try(PrintWriter out = new PrintWriter("beeline-hv1.sql")){ out.println("SHOW DATABASES;"); }
-		try(PrintWriter out = new PrintWriter("beeline-hv2.sql")){ out.println("CREATE DATABASE ${db};"); }
-		try(PrintWriter out = new PrintWriter("beeline-hv3.sql")){ out.println("DROP DATABASE ${db};"); out.println("CREATE DATABASE ${db};"); }
-		try(PrintWriter out = new PrintWriter("beeline-hv4.sql")){ out.println("DROP DATABASE ${db};"); }
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false));
-	
-		if(!results.get("outputStream").contains("bigtop_runtime_beeline_hivevar")){
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv2.sql",false));
-		}else{
-			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv3.sql",false));
-		}
-		
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline --hivevar FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_beeline_hivevar") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv4.sql",false));		 
-	}
-	
-	@Test
-	public void checkBeelineFastConnect(){
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--fastConnect=false"));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline --fastConnect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("set fastconnect to true to skip"));
-	}
-
-	@Test
-	public void checkBeelineVerbose(){
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--verbose=true"));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline --verbose FAILED." +results.get("outputStream"), true, consoleMsg.contains("issuing: !connect jdbc:hive2:") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-	}
-	
-	@Test
-	public void checkBeelineShowHeader(){
-		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--showHeader=false").addArgument("-e").addArgument("SHOW DATABASES;"));
-		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline --showHeader FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("default")&&!consoleMsg.contains("database_name") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-	}
-
-	@AfterClass
-	public static void cleanup() throws FileNotFoundException {
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf beeline*.sql", false));
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf connect.url", false));
-	}
+
+    public static final Log LOG = LogFactory.getLog(TestBeeline.class.getName());
+
+    private static final String URL = "bigtop.test.hive.jdbc.url";
+    private static final String USER = "bigtop.test.hive.jdbc.user";
+    private static final String PASSWD = "bigtop.test.hive.jdbc.password";
+
+    private static Map<String, String> results;
+    private static String beelineUrl;
+    private static String beelineUser;
+    private static String beelinePasswd;
+
+    //creating beeline base command with username and password as per inputs
+    private static CommandLine beelineBaseCommand = new CommandLine("beeline");
+
+    @BeforeClass
+    public static void initialSetup() {
+        TestBeeline.beelineUrl = System.getProperty(URL);
+        TestBeeline.beelineUser = System.getProperty(USER);
+        TestBeeline.beelinePasswd = System.getProperty(PASSWD);
+
+        if (beelineUser != null && beelineUser != "" && beelinePasswd != null && beelinePasswd != "") {
+            beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd);
+        } else if (beelineUser != null && beelineUser != "") {
+            beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser);
+        } else {
+            beelineBaseCommand.addArgument("-u").addArgument(beelineUrl);
+        }
+        LOG.info("URL is " + beelineUrl);
+        LOG.info("User is " + beelineUser);
+        LOG.info("Passwd is " + beelinePasswd);
+        LOG.info("Passwd is null " + (beelinePasswd == null));
+    }
+
+    @Test
+    public void checkBeeline() {
+        results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand));
+        String consoleMsg = results.get("outputStream").toLowerCase();
+        Assert.assertEquals("beeline -u FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("connecting to " + beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+    }
+
+    @Test
+    public void checkBeelineConnect() {
+        try (PrintWriter out = new PrintWriter("connect.url")) {
+            out.println("!connect " + beelineUrl + " " + beelineUser + " " + beelinePasswd);
+            out.println("!quit");
+        } catch (FileNotFoundException e1) {
+            e1.printStackTrace();
+        }
+        results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -f connect.url", false));
+        String consoleMsg = results.get("outputStream").toLowerCase();
+        Assert.assertEquals("beeline !connect FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("connecting to " + beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+    }
+
+    @Test
+    public void checkBeelineHelp() {
+        results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("--help"));
+        String consoleMsg = results.get("outputStream").toLowerCase();
+        Assert.assertEquals("beeline --help FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("display this message") && consoleMsg.contains("usage: java org.apache.hive.cli.beeline.beeline") && !consoleMsg.contains("exception"));
+    }
+
+    @Test
+    public void checkBeelineQueryExecFromCmdLine() {
+        results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
+        if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+            results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive;"));
+            results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
+        } else {
+            results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive;"));
+            results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE bigtop_runtime_hive;"));
+            results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
+        }
+        String consoleMsg = results.get("outputStream").toLowerCase();
+        Assert.assertEquals("beeline -e FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_hive") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+        HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE bigtop_runtime_hive"));
+    }
+
+    @Test
+    public void checkBeelineQueryExecFromFile() throws FileNotFoundException {
+
+        try (PrintWriter out = new PrintWriter("beeline-f1.sql")) {
+            out.println("SHOW DATABASES;");
+        }
+        try (PrintWriter out = new PrintWriter("beeline-f2.sql")) {
+            out.println("CREATE DATABASE bigtop_runtime_hive;");
+        }
+        try (PrintWriter out = new PrintWriter("beeline-f3.sql")) {
+            out.println("DROP DATABASE bigtop_runtime_hive;");
+            out.println("CREATE DATABASE bigtop_runtime_hive;");
+        }
+        try (PrintWriter out = new PrintWriter("beeline-f4.sql")) {
+            out.println("DROP DATABASE bigtop_runtime_hive;");
+        }
+        results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql", false));
+
+        if (!results.get("outputStream").contains("bigtop_runtime_hive")) {
+            results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f2.sql", false));
+        } else {
+            results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f3.sql", false));
+        }
+
+        results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql", false));
+
+        String consoleMsg = results.get("outputStream").toLowerCase();
+        Assert.assertEquals("beeline -f FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_hive") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+        HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f4.sql", false));
+    }
+
+    @Test
+    public void checkBeelineInitFile() throws FileNotFoundException {
+
+        try (PrintWriter out = new PrintWriter("beeline-i1.sql")) {
+            out.println("SHOW DATABASES;");
+        }
+        try (PrintWriter out = new PrintWriter("beeline-i2.sql")) {
+            out.println("CREATE DATABASE bigtop_runtime_beeline_init;");
+        }
+        try (PrintWriter out = new PrintWriter("beeline-i3.sql")) {
+            out.println("DROP DATABASE bigtop_runtime_beeline_init;");
+            out.println("CREATE DATABASE bigtop_runtime_beeline_init;");
+        }
+        try (PrintWriter out = new PrintWriter("beeline-i4.sql")) {
+            out.println("DROP DATABASE bigtop_runtime_beeline_init;");
+        }
+        results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql", false));
+
+        if (!results.get("outputStream").contains("bigtop_runtime_beeline_init")) {
+            results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i2.sql", false));
+        } else {
+            results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i3.sql", false));
+        }
+
+        results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql", false));
+        String consoleMsg = results.get("outputStream").toLowerCase();
+        Assert.assertEquals("beeline -i FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_beeline_init") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+        HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i4.sql", false));
+    }
+
+    @Test
+    public void checkBeelineHiveVar() throws FileNotFoundException {
+
+        try (PrintWriter out = new PrintWriter("beeline-hv1.sql")) {
+            out.println("SHOW DATABASES;");
+        }
+        try (PrintWriter out = new PrintWriter("beeline-hv2.sql")) {
+            out.println("CREATE DATABASE ${db};");
+        }
+        try (PrintWriter out = new PrintWriter("beeline-hv3.sql")) {
+            out.println("DROP DATABASE ${db};");
+            out.println("CREATE DATABASE ${db};");
+        }
+        try (PrintWriter out = new PrintWriter("beeline-hv4.sql")) {
+            out.println("DROP DATABASE ${db};");
+        }
+        results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql", false));
+
+        if (!results.get("outputStream").contains("bigtop_runtime_beeline_hivevar")) {
+            results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv2.sql", false));
+        } else {
+            results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv3.sql", false));
+        }
+
+        results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql", false));
+        String consoleMsg = results.get("outputStream").toLowerCase();
+        Assert.assertEquals("beeline --hivevar FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("bigtop_runtime_beeline_hivevar") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+        HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv4.sql", false));
+    }
+
+    @Test
+    public void checkBeelineFastConnect() {
+        results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--fastConnect=false"));
+        String consoleMsg = results.get("outputStream").toLowerCase();
+        Assert.assertEquals("beeline --fastConnect FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("set fastconnect to true to skip"));
+    }
+
+    @Test
+    public void checkBeelineVerbose() {
+        results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--verbose=true"));
+        String consoleMsg = results.get("outputStream").toLowerCase();
+        Assert.assertEquals("beeline --verbose FAILED." + results.get("outputStream"), true, consoleMsg.contains("issuing: !connect jdbc:hive2:") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+    }
+
+    @Test
+    public void checkBeelineShowHeader() {
+        results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--showHeader=false").addArgument("-e").addArgument("SHOW DATABASES;"));
+        String consoleMsg = results.get("outputStream").toLowerCase();
+        Assert.assertEquals("beeline --showHeader FAILED. \n" + results.get("outputStream"), true, consoleMsg.contains("default") && !consoleMsg.contains("database_name") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+    }
+
+    @AfterClass
+    public static void cleanup() throws FileNotFoundException {
+        results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf beeline*.sql", false));
+        results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf connect.url", false));
+    }
 }
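
For readers without the full harness, the behaviour these beeline tests exercise -- ${db} expansion from --hivevar, captured console output, case-insensitive matching -- can be reproduced with a short standalone program. The sketch below is editorial and not part of the patch: the beeline executable on the PATH, the JDBC URL, the script file name and the class name are placeholder assumptions, whereas the tests resolve the command and URL through HiveHelper and the generated connect.url file, and they use -i where -f is used here to run the script and exit.

    import java.io.ByteArrayOutputStream;
    import java.io.PrintWriter;

    import org.apache.commons.exec.CommandLine;
    import org.apache.commons.exec.DefaultExecutor;
    import org.apache.commons.exec.PumpStreamHandler;

    public class BeelineHivevarSketch {
      public static void main(String[] args) throws Exception {
        // Script that relies on beeline expanding ${db} from --hivevar.
        try (PrintWriter out = new PrintWriter("hivevar-sketch.sql")) {
          out.println("CREATE DATABASE IF NOT EXISTS ${db};");
          out.println("SHOW DATABASES;");
        }

        // Placeholder connection URL; the tests read the real one from connect.url.
        CommandLine cmd = new CommandLine("beeline")
            .addArgument("-u").addArgument("jdbc:hive2://localhost:10000")
            .addArgument("--hivevar").addArgument("db=bigtop_runtime_beeline_hivevar")
            .addArgument("-f").addArgument("hivevar-sketch.sql", false);

        ByteArrayOutputStream console = new ByteArrayOutputStream();
        DefaultExecutor executor = new DefaultExecutor();
        executor.setStreamHandler(new PumpStreamHandler(console));
        executor.execute(cmd);

        // Same style of check as the tests: lower-case the output, look for the
        // expanded database name, and screen for error/exception text.
        String output = console.toString().toLowerCase();
        System.out.println("database listed: " + output.contains("bigtop_runtime_beeline_hivevar"));
        System.out.println("clean run:       " + !(output.contains("error") || output.contains("exception")));
      }
    }

The tests' extra bookkeeping -- creating the database only when it is absent, and dropping it again in a final script -- is omitted from this sketch.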


[02/50] [abbrv] bigtop git commit: Added logging and early returns in catch blocks, per Alan's review comments.

Posted by rv...@apache.org.
Added logging and early returns in catch blocks, per Alan's review comments.

(cherry picked from commit 84eef9f47d05392f7b8710c3897fa3bf47bca796)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/a9be6752
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/a9be6752
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/a9be6752

Branch: refs/heads/master
Commit: a9be67525558cbb5651181966297b00de78fc9a3
Parents: 62dbaf7
Author: Raj Desai <rd...@us.ibm.com>
Authored: Thu Oct 27 10:31:24 2016 -0700
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:09 2017 -0700

----------------------------------------------------------------------
 .../java/org/odpi/specs/runtime/hive/HiveHelper.java  | 14 ++++++++++++--
 1 file changed, 12 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/a9be6752/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
index 2ac9cc8..a4477ff 100644
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
+++ b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/HiveHelper.java
@@ -36,8 +36,7 @@ import org.apache.commons.logging.LogFactory;
 public class HiveHelper {
 	
 	private static final Log LOG = LogFactory.getLog(HiveHelper.class.getName());
-	
-	
+		
 	public static Map<String, String> execCommand(CommandLine commandline) {
 		
 		System.out.println("Executing command:");
@@ -48,6 +47,7 @@ public class HiveHelper {
 			env = EnvironmentUtils.getProcEnvironment();
 		} catch (IOException e1) {
 			// TODO Auto-generated catch block
+			LOG.debug("Failed to get process environment: "+ e1.getMessage());
 			e1.printStackTrace();
 		}
 
@@ -63,10 +63,20 @@ public class HiveHelper {
 			executor.execute(commandline, env, resultHandler);
 		} catch (ExecuteException e) {
 			// TODO Auto-generated catch block
+			LOG.debug("Failed to execute command with exit value: "+ String.valueOf(resultHandler.getExitValue()));
+			LOG.debug("outputStream: "+ outputStream.toString());
+			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+			entry.put("outputStream", outputStream.toString() + e.getMessage());
 			e.printStackTrace();
+			return entry;
 		} catch (IOException e) {
 			// TODO Auto-generated catch block
+			LOG.debug("Failed to execute command with exit value: "+ String.valueOf(resultHandler.getExitValue()));
+			LOG.debug("outputStream: "+ outputStream.toString());
+			entry.put("exitValue", String.valueOf(resultHandler.getExitValue()));
+			entry.put("outputStream", outputStream.toString() + e.getMessage());
 			e.printStackTrace();
+			return entry;
 		}
 		
 		try {
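
The net effect of this change is a consistent contract for callers: execCommand always returns a map carrying exitValue and outputStream, and the catch blocks now log what they know and return that partial result instead of falling through with an empty map. The following is a condensed sketch of the same contract, not the patched method itself: it uses the synchronous execute() overload for brevity (the patch uses the asynchronous overload with a result handler), and the -1 sentinel for a command that never started is illustrative only.

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.commons.exec.CommandLine;
    import org.apache.commons.exec.DefaultExecutor;
    import org.apache.commons.exec.ExecuteException;
    import org.apache.commons.exec.PumpStreamHandler;

    public class ExecContractSketch {
      public static Map<String, String> execCommand(CommandLine commandline) {
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        DefaultExecutor executor = new DefaultExecutor();
        executor.setStreamHandler(new PumpStreamHandler(outputStream));

        Map<String, String> entry = new HashMap<String, String>();
        try {
          // Synchronous variant; the patched helper uses execute(..., resultHandler).
          int exitValue = executor.execute(commandline);
          entry.put("exitValue", String.valueOf(exitValue));
          entry.put("outputStream", outputStream.toString());
        } catch (ExecuteException e) {
          // The command ran but failed: record its exit code and whatever output
          // was captured, then return early -- the same shape as the patch.
          entry.put("exitValue", String.valueOf(e.getExitValue()));
          entry.put("outputStream", outputStream.toString() + e.getMessage());
        } catch (IOException e) {
          // The command could not be started at all; -1 is an illustrative sentinel.
          entry.put("exitValue", "-1");
          entry.put("outputStream", outputStream.toString() + e.getMessage());
        }
        return entry;
      }
    }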


[16/50] [abbrv] bigtop git commit: ODPI-185. Test [HIVE_HCATALOG]

Posted by rv...@apache.org.
ODPI-185. Test [HIVE_HCATALOG]

(cherry picked from commit b493a57dd679c482daa1465fd57a9854550be549)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/47c31dca
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/47c31dca
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/47c31dca

Branch: refs/heads/master
Commit: 47c31dcaa0d409f63e6869a54331c29e26f1b840
Parents: a41bb2d
Author: Roman Shaposhnik <rv...@apache.org>
Authored: Tue Nov 8 19:24:44 2016 -0800
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:13 2017 -0700

----------------------------------------------------------------------
 bigtop-tests/spec-tests/runtime/build.gradle         | 13 +++++++++++++
 .../java/org/odpi/specs/runtime/hive/HCatalogMR.java | 11 -----------
 .../org/odpi/specs/runtime/hive/TestHCatalog.java    | 15 +++++----------
 3 files changed, 18 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/47c31dca/bigtop-tests/spec-tests/runtime/build.gradle
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/build.gradle b/bigtop-tests/spec-tests/runtime/build.gradle
index f0166c9..a88a3b6 100644
--- a/bigtop-tests/spec-tests/runtime/build.gradle
+++ b/bigtop-tests/spec-tests/runtime/build.gradle
@@ -43,7 +43,20 @@ dependencies {
   if (System.env.HADOOP_CONF_DIR) testRuntime files(System.env.HADOOP_CONF_DIR)
 }
 
+jar {
+    from {
+        (configurations.runtime).grep{it.toString() =~ /(hive|libfb303)-.*[jw]ar$/}.collect {
+              zipTree(it)
+        }
+    }
+
+    exclude 'META-INF/*.RSA', 'META-INF/*.SF','META-INF/*.DSA'
+}
+
 test {
   // Change the default location where test data is picked up
   systemProperty 'test.resources.dir', "${buildDir}/resources/test/"
+  systemProperty 'odpi.test.hive.hcat.job.jar', jar.archivePath
+  systemProperty 'odpi.test.hive.hcat.core.jar', (configurations.runtime).find { it.toString() =~ /hive-hcatalog-core-.*jar$/ }
 }
+test.dependsOn jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/47c31dca/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
index a6ff375..7cb9bbe 100644
--- a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
+++ b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
@@ -72,17 +72,6 @@ public class HCatalogMR extends Configured implements Tool {
     HCatOutputFormat.setSchema(job, HCatSchemaUtils.getHCatSchema(outputSchemaStr));
     job.setOutputFormatClass(HCatOutputFormat.class);
 
-    // TODO All four of these jars need to be in the distributed cache of the job for the job to
-    // succeed.  I loaded them into a known location in HDFS to get them in the cache.  There may
-    // be a way to load them from a file on the gateway machine.  We could also put in a hdfs dfs
-    // -put operation into a gradle step as part of the build so that the jars are picked up from
-    // the distribution and put in a known location in HDFS from when they can be picked up in
-    // the distributed cache.
-    job.addCacheArchive(new URI("hdfs:/user/gates/hive-hcatalog-core-1.2.1.jar"));
-    job.addCacheArchive(new URI("hdfs:/user/gates/hive-metastore-1.2.1.jar"));
-    job.addCacheArchive(new URI("hdfs:/user/gates/hive-exec-1.2.1.jar"));
-    job.addCacheArchive(new URI("hdfs:/user/gates/libfb303-0.9.2.jar"));
-
     return job.waitForCompletion(true) ? 0 : 1;
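
The TODO removed above described the approach this commit retires: pushing the Hive jars into the job's distributed cache from a known HDFS location, rather than packaging them into the test jar via the gradle jar block. Purely for context, a minimal sketch of that retired approach follows; the HDFS paths, jar versions and class name are placeholders, not locations the build provisions.

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;

    public class CacheArchiveSketch {
      public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "hcatalog-cache-sketch");
        // Placeholder HDFS paths; a build or setup step would first have to copy
        // the jars into HDFS (the removed TODO suggested an hdfs dfs -put step).
        job.addCacheArchive(new URI("hdfs:///libs/hive-hcatalog-core-1.2.1.jar"));
        job.addCacheArchive(new URI("hdfs:///libs/hive-metastore-1.2.1.jar"));
        job.addCacheArchive(new URI("hdfs:///libs/hive-exec-1.2.1.jar"));
        job.addCacheArchive(new URI("hdfs:///libs/libfb303-0.9.3.jar"));
        // ...the rest of the job setup would mirror HCatalogMR before submission.
      }
    }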
 
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/47c31dca/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
index 87e3eb0..b51db02 100644
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
+++ b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
@@ -51,6 +51,8 @@ import java.util.Random;
 
 
 public class TestHCatalog {
+  private static final String JOBJAR = "odpi.test.hive.hcat.job.jar";
+  private static final String HCATCORE = "odpi.test.hive.hcat.core.jar";
 
   private static final Log LOG = LogFactory.getLog(TestHCatalog.class.getName());
 
@@ -132,18 +134,11 @@ public class TestHCatalog {
     out.close();
 
     Map<String, String> env = new HashMap<>();
-    // TODO These need to be set from the environment rather than hard wired
-    env.put("HADOOP_HOME","/Users/gates/grid/odpi-testing/hadoop-2.7.3");
-    env.put("HADOOP_CLASSPATH", "/Users/gates/grid/odpi-testing/apache-hive-1.2.1-bin/hcatalog/share/hcatalog/hive-hcatalog-core-1.2.1.jar");
-    env.put("HIVE_HOME", "/Users/gates/grid/odpi-testing/apache-hive-1.2.1-bin");
-    Map<String, String> results = HiveHelper.execCommand(new CommandLine("/Users/gates/grid/odpi-testing/apache-hive-1.2.1-bin/bin/hive")
+    env.put("HADOOP_CLASSPATH", System.getProperty(HCATCORE, ""));
+    Map<String, String> results = HiveHelper.execCommand(new CommandLine("hive")
         .addArgument("--service")
         .addArgument("jar")
-        // TODO This is the jar built by gradle, but I didn't know how to take the jar built in
-        // the build phase and reference it in the test phase.  Perhaps a move operation could be
-        // put in the middle so the jar is moved to a known location that can be referenced here,
-        // or maybe gradle can pass in its working directory so that we can reference it from there.
-        .addArgument("/Users/gates/git/bigtop/runtime-1.2.0-SNAPSHOT.jar")
+        .addArgument(System.getProperty(JOBJAR))
         .addArgument(HCatalogMR.class.getName())
         .addArgument(inputTable)
         .addArgument(outputTable)
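
With the jar wiring above, the launch step in TestHCatalog reduces to: read the two system properties set by build.gradle, export the hcatalog core jar on HADOOP_CLASSPATH, and run the bundled job through hive --service jar. The sketch below condenses that step into a standalone program and is not the test itself: the table names and class name are placeholders, the argument list is abbreviated relative to the test, and the test goes through HiveHelper.execCommand rather than calling commons-exec directly.

    import java.io.ByteArrayOutputStream;
    import java.util.Map;

    import org.apache.commons.exec.CommandLine;
    import org.apache.commons.exec.DefaultExecutor;
    import org.apache.commons.exec.PumpStreamHandler;
    import org.apache.commons.exec.environment.EnvironmentUtils;

    public class HCatLaunchSketch {
      public static void main(String[] args) throws Exception {
        // Injected by the test block in build.gradle above.
        String jobJar = System.getProperty("odpi.test.hive.hcat.job.jar");
        String hcatCore = System.getProperty("odpi.test.hive.hcat.core.jar", "");

        // Placeholder table names; TestHCatalog creates random ones per run.
        String inputTable = "hcat_input";
        String outputTable = "hcat_output";

        Map<String, String> env = EnvironmentUtils.getProcEnvironment();
        env.put("HADOOP_CLASSPATH", hcatCore);

        CommandLine cmd = new CommandLine("hive")
            .addArgument("--service").addArgument("jar")
            .addArgument(jobJar)
            .addArgument("org.odpi.specs.runtime.hive.HCatalogMR")
            .addArgument(inputTable)
            .addArgument(outputTable);

        ByteArrayOutputStream console = new ByteArrayOutputStream();
        DefaultExecutor executor = new DefaultExecutor();
        executor.setStreamHandler(new PumpStreamHandler(console));
        executor.execute(cmd, env);
        System.out.println(console.toString());
      }
    }

The design point of the commit is visible here: because gradle passes jar.archivePath and the located hive-hcatalog-core jar as system properties, nothing in the test depends on a developer's local install paths.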


[32/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-api-2.7.3-api-report.json
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-api-2.7.3-api-report.json b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-api-2.7.3-api-report.json
deleted file mode 100644
index 6ad5f18..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-api-2.7.3-api-report.json
+++ /dev/null
@@ -1 +0,0 @@
-{"name":"hadoop-yarn-api","version":"2.7.3","classes":{"org.apache.hadoop.yarn.api.records.ApplicationAccessType":{"name":"org.apache.hadoop.yarn.api.records.ApplicationAccessType","methods":{"[Lorg.apache.hadoop.yarn.api.records.ApplicationAccessType; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.ApplicationAccessType;","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationAccessType valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAccessType","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest","methods":{"void setTrackingUrl(java.lang.String)":{"name":"setTrackingUrl","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest newInstance
 (java.lang.String, int, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest","args":["java.lang.String","int","java.lang.String"],"exceptions":[]},"void setHost(java.lang.String)":{"name":"setHost","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int getRpcPort()":{"name":"getRpcPort","returnType":"int","args":[],"exceptions":[]},"void setRpcPort(int)":{"name":"setRpcPort","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String getHost()":{"name":"getHost","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getTrackingUrl()":{"name":"getTrackingUrl","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest","methods":{"org.apache.hadoop.yarn.api.records.ContainerLaunchContext getContainerLaunchContext()":{"na
 me":"getContainerLaunchContext","returnType":"org.apache.hadoop.yarn.api.records.ContainerLaunchContext","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest newInstance(org.apache.hadoop.yarn.api.records.ContainerLaunchContext, org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest","args":["org.apache.hadoop.yarn.api.records.ContainerLaunchContext","org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"void setContainerToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setContainerToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getContainerToken()":{"name":"getContainerToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"void setContainerLaunchContext(org.apache.hadoop.yarn.api.records.ContainerLaunchContext)":{"na
 me":"setContainerLaunchContext","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerLaunchContext"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest":{"name":"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest","methods":{"void setBlacklistAdditions(java.util.List)":{"name":"setBlacklistAdditions","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.List getBlacklistRemovals()":{"name":"getBlacklistRemovals","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.List getBlacklistAdditions()":{"name":"getBlacklistAdditions","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest newInstance(java.util.List, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest","args":["java.util.List","java.util.List"],"exceptions":[]},"void setBlacklistRemovals(java.util.List)":{"name":"
 setBlacklistRemovals","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest newInstance(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.YarnApplicationAttemp
 tState":{"name":"org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState","methods":{"org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState","args":["java.lang.String"],"exceptions":[]},"[Lorg.apache.hadoop.yarn.api.records.YarnApplicationAttemptState; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.YarnClusterMetrics":{"name":"org.apache.hadoop.yarn.api.records.YarnClusterMetrics","methods":{"org.apache.hadoop.yarn.api.records.YarnClusterMetrics newInstance(int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.YarnClusterMetrics","args":["int"],"exceptions":[]},"int getNumNodeManagers()":{"name":"getNumNodeManagers","returnType":"int","args":[],"exceptions":[]},"void setNumNodeManagers(int)":{"name":"
 setNumNodeManagers","returnType":"void","args":["int"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest","methods":{"java.util.List getIncreaseRequests()":{"name":"getIncreaseRequests","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest newInstance(int, float, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest","args":["int","float","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest","java.util.List"],"exceptions":[]},"void setResponseId(int)":{"name":"setResponseId","returnType":"void","args":["int"],"exceptions":[]},"void setAskList(java.util.List)":{"name":"setAskList","returnType":"void","args":["java.util.List"],"exception
 s":[]},"float getProgress()":{"name":"getProgress","returnType":"float","args":[],"exceptions":[]},"java.util.List getReleaseList()":{"name":"getReleaseList","returnType":"java.util.List","args":[],"exceptions":[]},"void setIncreaseRequests(java.util.List)":{"name":"setIncreaseRequests","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest getResourceBlacklistRequest()":{"name":"getResourceBlacklistRequest","returnType":"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest newInstance(int, float, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest","args":["int","float","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest"],"exceptions":[]},"voi
 d setProgress(float)":{"name":"setProgress","returnType":"void","args":["float"],"exceptions":[]},"void setResourceBlacklistRequest(org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest)":{"name":"setResourceBlacklistRequest","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest"],"exceptions":[]},"java.util.List getAskList()":{"name":"getAskList","returnType":"java.util.List","args":[],"exceptions":[]},"int getResponseId()":{"name":"getResponseId","returnType":"int","args":[],"exceptions":[]},"void setReleaseList(java.util.List)":{"name":"setReleaseList","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse","methods":{"void setQueueInfo(org.apache.hadoop.yarn.api.records.QueueInfo)":{"name":"setQueueInfo","returnType":"void","args":["org.apache.hadoop.yarn.api.records.QueueInfo"],"exceptions":
 []},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse newInstance(org.apache.hadoop.yarn.api.records.QueueInfo)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse","args":["org.apache.hadoop.yarn.api.records.QueueInfo"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.QueueInfo getQueueInfo()":{"name":"getQueueInfo","returnType":"org.apache.hadoop.yarn.api.records.QueueInfo","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationReport":{"name":"org.apache.hadoop.yarn.api.records.ApplicationReport","methods":{"void setApplicationResourceUsageReport(org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport)":{"name":"setApplicationResourceUsageReport","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport"],"exceptions":[]},"long getFinishTime()":{"name":"getFinishTime","returnType":"long","args":[],"exceptions":[]},"void setFinalApplicationStatus(
 org.apache.hadoop.yarn.api.records.FinalApplicationStatus)":{"name":"setFinalApplicationStatus","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus"],"exceptions":[]},"void setUser(java.lang.String)":{"name":"setUser","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.FinalApplicationStatus getFinalApplicationStatus()":{"name":"getFinalApplicationStatus","returnType":"org.apache.hadoop.yarn.api.records.FinalApplicationStatus","args":[],"exceptions":[]},"void setName(java.lang.String)":{"name":"setName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport getApplicationResourceUsageReport()":{"name":"getApplicationResourceUsageReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport","args":[],"exceptions":[]},"java.util.Set getApplicationTags()":{"name":"getApplicationTags","returnType":"jav
 a.util.Set","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationReport newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, org.apache.hadoop.yarn.api.records.ApplicationAttemptId, java.lang.String, java.lang.String, java.lang.String, java.lang.String, int, org.apache.hadoop.yarn.api.records.Token, org.apache.hadoop.yarn.api.records.YarnApplicationState, java.lang.String, java.lang.String, long, long, org.apache.hadoop.yarn.api.records.FinalApplicationStatus, org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport, java.lang.String, float, java.lang.String, org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":["org.apache.hadoop.yarn.api.records.ApplicationId","org.apache.hadoop.yarn.api.records.ApplicationAttemptId","java.lang.String","java.lang.String","java.lang.String","java.lang.String","int","org.apache.hadoop.yarn.api.records.Token","org.apache.hadoo
 p.yarn.api.records.YarnApplicationState","java.lang.String","java.lang.String","long","long","org.apache.hadoop.yarn.api.records.FinalApplicationStatus","org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport","java.lang.String","float","java.lang.String","org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"void setApplicationType(java.lang.String)":{"name":"setApplicationType","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getClientToAMToken()":{"name":"getClientToAMToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"void setYarnApplicationState(org.apache.hadoop.yarn.api.records.YarnApplicationState)":{"name":"setYarnApplicationState","returnType":"void","args":["org.apache.hadoo
 p.yarn.api.records.YarnApplicationState"],"exceptions":[]},"float getProgress()":{"name":"getProgress","returnType":"float","args":[],"exceptions":[]},"void setQueue(java.lang.String)":{"name":"setQueue","returnType":"void","args":["java.lang.String"],"exceptions":[]},"long getStartTime()":{"name":"getStartTime","returnType":"long","args":[],"exceptions":[]},"void setStartTime(long)":{"name":"setStartTime","returnType":"void","args":["long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getAMRMToken()":{"name":"getAMRMToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"java.lang.String getHost()":{"name":"getHost","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getUser()":{"name":"getUser","returnType":"java.lang.String","args":[],"exceptions":[]},"void setDiagnostics(java.lang.String)":{"name":"setDiagnostics","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setOriginalTrackingUrl
 (java.lang.String)":{"name":"setOriginalTrackingUrl","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setApplicationTags(java.util.Set)":{"name":"setApplicationTags","returnType":"void","args":["java.util.Set"],"exceptions":[]},"java.lang.String getQueue()":{"name":"getQueue","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.YarnApplicationState getYarnApplicationState()":{"name":"getYarnApplicationState","returnType":"org.apache.hadoop.yarn.api.records.YarnApplicationState","args":[],"exceptions":[]},"void setTrackingUrl(java.lang.String)":{"name":"setTrackingUrl","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setHost(java.lang.String)":{"name":"setHost","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setClientToAMToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setClientToAMToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"excepti
 ons":[]},"void setAMRMToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setAMRMToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"int getRpcPort()":{"name":"getRpcPort","returnType":"int","args":[],"exceptions":[]},"void setRpcPort(int)":{"name":"setRpcPort","returnType":"void","args":["int"],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"java.lang.String getTrackingUrl()":{"name":"getTrackingUrl","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getDiagnostics()":{"name":"getDiagnostics","returnType":"java.lang.String","args":[],"exceptions":[]},"void setProgress(float)":{"name":"setProgress","returnType":"void","args":["float"],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"ex
 ceptions":[]},"java.lang.String getOriginalTrackingUrl()":{"name":"getOriginalTrackingUrl","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getApplicationType()":{"name":"getApplicationType","returnType":"java.lang.String","args":[],"exceptions":[]},"void setFinishTime(long)":{"name":"setFinishTime","returnType":"void","args":["long"],"exceptions":[]},"void setCurrentApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId)":{"name":"setCurrentApplicationAttemptId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptId getCurrentApplicationAttemptId()":{"name":"getCurrentApplicationAttemptId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptId","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.Resource":{"name":"org.apache.hadoop.yarn.api.records.Resource","methods":{"int hashCode()":{"name":"hashCo
 de","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource newInstance(int, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":["int","int"],"exceptions":[]},"void setVirtualCores(int)":{"name":"setVirtualCores","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void setMemory(int)":{"name":"setMemory","returnType":"void","args":["int"],"exceptions":[]},"int getMemory()":{"name":"getMemory","returnType":"int","args":[],"exceptions":[]},"int getVirtualCores()":{"name":"getVirtualCores","returnType":"int","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.NodeReport":{"name":"org.apache.hadoop.yarn.api.records.NodeReport","methods":{"void setCapability(org.apache.hado
 op.yarn.api.records.Resource)":{"name":"setCapability","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"int getNumContainers()":{"name":"getNumContainers","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getUsed()":{"name":"getUsed","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeId getNodeId()":{"name":"getNodeId","returnType":"org.apache.hadoop.yarn.api.records.NodeId","args":[],"exceptions":[]},"long getLastHealthReportTime()":{"name":"getLastHealthReportTime","returnType":"long","args":[],"exceptions":[]},"void setNodeId(org.apache.hadoop.yarn.api.records.NodeId)":{"name":"setNodeId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"void setNodeLabels(java.util.Set)":{"name":"setNodeLabels","returnType":"void","args":["java.util.Set"],"exceptions":[]},"org.apache.hadoop.yarn.ap
 i.records.Resource getCapability()":{"name":"getCapability","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setHealthReport(java.lang.String)":{"name":"setHealthReport","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setRackName(java.lang.String)":{"name":"setRackName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setLastHealthReportTime(long)":{"name":"setLastHealthReportTime","returnType":"void","args":["long"],"exceptions":[]},"void setHttpAddress(java.lang.String)":{"name":"setHttpAddress","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getRackName()":{"name":"getRackName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setUsed(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setUsed","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"java.lang.String getHealthReport()":{"name":"getHealthRe
 port","returnType":"java.lang.String","args":[],"exceptions":[]},"void setNodeState(org.apache.hadoop.yarn.api.records.NodeState)":{"name":"setNodeState","returnType":"void","args":["org.apache.hadoop.yarn.api.records.NodeState"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeReport newInstance(org.apache.hadoop.yarn.api.records.NodeId, org.apache.hadoop.yarn.api.records.NodeState, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, int, java.lang.String, long)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.NodeReport","args":["org.apache.hadoop.yarn.api.records.NodeId","org.apache.hadoop.yarn.api.records.NodeState","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource","int","java.lang.String","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeState getNodeState()":{"name":"getNodeState"
 ,"returnType":"org.apache.hadoop.yarn.api.records.NodeState","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeReport newInstance(org.apache.hadoop.yarn.api.records.NodeId, org.apache.hadoop.yarn.api.records.NodeState, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, int, java.lang.String, long, java.util.Set)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.NodeReport","args":["org.apache.hadoop.yarn.api.records.NodeId","org.apache.hadoop.yarn.api.records.NodeState","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource","int","java.lang.String","long","java.util.Set"],"exceptions":[]},"java.lang.String getHttpAddress()":{"name":"getHttpAddress","returnType":"java.lang.String","args":[],"exceptions":[]},"void setNumContainers(int)":{"name":"setNumContainers","returnType":"void","args":["int"],"
 exceptions":[]},"java.util.Set getNodeLabels()":{"name":"getNodeLabels","returnType":"java.util.Set","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.Priority":{"name":"org.apache.hadoop.yarn.api.records.Priority","methods":{"void setPriority(int)":{"name":"setPriority","returnType":"void","args":["int"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.yarn.api.records.Priority)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.Priority"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Priority newInstance(int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.Priority","args":["int"],"exceptions":[]},"int getPriority()":{"name":"getPriority","returnType":"int","args":[],"exceptions":[]},"int compareTo(java.lang.Object)
 ":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationAttemptId":{"name":"org.apache.hadoop.yarn.api.records.ApplicationAttemptId","methods":{"int compareTo(org.apache.hadoop.yarn.api.records.ApplicationAttemptId)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"int getAttemptId()":{"name":"getAttemptId","returnType":"int","args":[],"except
 ions":[]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptId newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptId","args":["org.apache.hadoop.yarn.api.records.ApplicationId","int"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.NMToken":{"name":"org.apache.hadoop.yarn.api.records.NMToken","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void setToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeId getNodeId()":{"name":"getNodeId","returnType":
 "org.apache.hadoop.yarn.api.records.NodeId","args":[],"exceptions":[]},"void setNodeId(org.apache.hadoop.yarn.api.records.NodeId)":{"name":"setNodeId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getToken()":{"name":"getToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NMToken newInstance(org.apache.hadoop.yarn.api.records.NodeId, org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.NMToken","args":["org.apache.hadoop.yarn.api.records.NodeId","org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicati
 onMasterRequest","methods":{"void setFinalApplicationStatus(org.apache.hadoop.yarn.api.records.FinalApplicationStatus)":{"name":"setFinalApplicationStatus","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.FinalApplicationStatus getFinalApplicationStatus()":{"name":"getFinalApplicationStatus","returnType":"org.apache.hadoop.yarn.api.records.FinalApplicationStatus","args":[],"exceptions":[]},"void setTrackingUrl(java.lang.String)":{"name":"setTrackingUrl","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest newInstance(org.apache.hadoop.yarn.api.records.FinalApplicationStatus, java.lang.String, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus","java.lang.String",
 "java.lang.String"],"exceptions":[]},"void setDiagnostics(java.lang.String)":{"name":"setDiagnostics","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getTrackingUrl()":{"name":"getTrackingUrl","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getDiagnostics()":{"name":"getDiagnostics","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetC
 lusterMetricsRequest newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest","methods":{"java.util.List getStartContainerRequests()":{"name":"getStartContainerRequests","returnType":"java.util.List","args":[],"exceptions":[]},"void setStartContainerRequests(java.util.List)":{"name":"setStartContainerRequests","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ContainerLaunchContext":{"name":"org.apache.hadoop.yarn.api.records.ContainerLaunchContext","methods":{"ja
 va.util.Map getApplicationACLs()":{"name":"getApplicationACLs","returnType":"java.util.Map","args":[],"exceptions":[]},"java.util.Map getServiceData()":{"name":"getServiceData","returnType":"java.util.Map","args":[],"exceptions":[]},"void setApplicationACLs(java.util.Map)":{"name":"setApplicationACLs","returnType":"void","args":["java.util.Map"],"exceptions":[]},"java.util.Map getLocalResources()":{"name":"getLocalResources","returnType":"java.util.Map","args":[],"exceptions":[]},"void setServiceData(java.util.Map)":{"name":"setServiceData","returnType":"void","args":["java.util.Map"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerLaunchContext newInstance(java.util.Map, java.util.Map, java.util.List, java.util.Map, java.nio.ByteBuffer, java.util.Map)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ContainerLaunchContext","args":["java.util.Map","java.util.Map","java.util.List","java.util.Map","java.nio.ByteBuffer","java.util.Map"],"exceptions"
 :[]},"java.util.Map getEnvironment()":{"name":"getEnvironment","returnType":"java.util.Map","args":[],"exceptions":[]},"java.util.List getCommands()":{"name":"getCommands","returnType":"java.util.List","args":[],"exceptions":[]},"java.nio.ByteBuffer getTokens()":{"name":"getTokens","returnType":"java.nio.ByteBuffer","args":[],"exceptions":[]},"void setLocalResources(java.util.Map)":{"name":"setLocalResources","returnType":"void","args":["java.util.Map"],"exceptions":[]},"void setCommands(java.util.List)":{"name":"setCommands","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setTokens(java.nio.ByteBuffer)":{"name":"setTokens","returnType":"void","args":["java.nio.ByteBuffer"],"exceptions":[]},"void setEnvironment(java.util.Map)":{"name":"setEnvironment","returnType":"void","args":["java.util.Map"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportReque
 st","methods":{"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest newInstance(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse","methods":{"void setFailedRequests(java.util.Map)":{"name":"setFailedRequests","returnType":"void","args":["java.util.Map"],"excep
 tions":[]},"void setSuccessfullyStoppedContainers(java.util.List)":{"name":"setSuccessfullyStoppedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.List getSuccessfullyStoppedContainers()":{"name":"getSuccessfullyStoppedContainers","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse newInstance(java.util.List, java.util.Map)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse","args":["java.util.List","java.util.Map"],"exceptions":[]},"java.util.Map getFailedRequests()":{"name":"getFailedRequests","returnType":"java.util.Map","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.QueueState":{"name":"org.apache.hadoop.yarn.api.records.QueueState","methods":{"org.apache.hadoop.yarn.api.records.QueueState valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.QueueState","args":["
 java.lang.String"],"exceptions":[]},"[Lorg.apache.hadoop.yarn.api.records.QueueState; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.QueueState;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationId":{"name":"org.apache.hadoop.yarn.api.records.ApplicationId","methods":{"org.apache.hadoop.yarn.api.records.ApplicationId newInstance(long, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":["long","int"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"long getClusterTimestamp()":{"name":"getClusterTimestamp","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"int getId()":{"name":"getId","returnType":"int","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"
 ],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"int compareTo(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse","methods":{"org.apache.hadoop.yarn.api.records.YarnClusterMetrics getClusterMetrics()":{"name":"getClusterMetrics","returnType":"org.apache.hadoop.yarn.api.records.YarnClusterMetrics","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse newInstance(org.apache.hadoop.yarn.api.records.YarnClusterMetrics)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse","args":["org.apache.hadoop.yarn.api.records.YarnClusterMetrics"],"exc
 eptions":[]},"void setClusterMetrics(org.apache.hadoop.yarn.api.records.YarnClusterMetrics)":{"name":"setClusterMetrics","returnType":"void","args":["org.apache.hadoop.yarn.api.records.YarnClusterMetrics"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse","args":["java.util.List"],"exceptions":[]},"java.util.List getUserAclsInfoList()":{"name":"getUserAclsInfoList","returnType":"java.util.List","args":[],"exceptions":[]},"void setUserAclsInfoList(java.util.List)":{"name":"setUserAclsInfoList","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.FinalApplicationStatus":{"name":"org.apache.hadoop.
 yarn.api.records.FinalApplicationStatus","methods":{"[Lorg.apache.hadoop.yarn.api.records.FinalApplicationStatus; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.FinalApplicationStatus;","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.FinalApplicationStatus valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.FinalApplicationStatus","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse","methods":{"java.util.List getApplicationList()":{"name":"getApplicationList","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse","args":["java.util.List"],"exceptions":[]},"voi
 d setApplicationList(java.util.List)":{"name":"setApplicationList","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.YarnApplicationState":{"name":"org.apache.hadoop.yarn.api.records.YarnApplicationState","methods":{"[Lorg.apache.hadoop.yarn.api.records.YarnApplicationState; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.YarnApplicationState;","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.YarnApplicationState valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.YarnApplicationState","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.URL":{"name":"org.apache.hadoop.yarn.api.records.URL","methods":{"java.lang.String getFile()":{"name":"getFile","returnType":"java.lang.String","args":[],"exceptions":[]},"void setPort(int)":{"name":"setPort","returnType":"void","args":["int"],"exceptions":[]},"void setUserInfo(java.la
 ng.String)":{"name":"setUserInfo","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setHost(java.lang.String)":{"name":"setHost","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setScheme(java.lang.String)":{"name":"setScheme","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getHost()":{"name":"getHost","returnType":"java.lang.String","args":[],"exceptions":[]},"void setFile(java.lang.String)":{"name":"setFile","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getScheme()":{"name":"getScheme","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getUserInfo()":{"name":"getUserInfo","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.URL newInstance(java.lang.String, java.lang.String, int, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.URL","args":["java.lang.String","
 java.lang.String","int","java.lang.String"],"exceptions":[]},"int getPort()":{"name":"getPort","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse","methods":{"void setIsUnregistered(boolean)":{"name":"setIsUnregistered","returnType":"void","args":["boolean"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse newInstance(boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse","args":["boolean"],"exceptions":[]},"boolean getIsUnregistered()":{"name":"getIsUnregistered","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.ApplicationMasterProtocol":{"name":"org.apache.hadoop.yarn.api.ApplicationMasterProtocol","methods":{"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse fi
 nishApplicationMaster(org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"finishApplicationMaster","returnType":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse registerApplicationMaster(org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"registerApplicationMaster","returnType":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.Yarn
 Exception","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse allocate(org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"allocate","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse","methods":{"void setApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationReport)":{"name":"setApplicationReport","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationReport"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationReport getApplicationReport()":{"name":"getApplicationReport","returnType
 ":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse newInstance(org.apache.hadoop.yarn.api.records.ApplicationReport)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse","args":["org.apache.hadoop.yarn.api.records.ApplicationReport"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest","methods":{"void setContainerIds(java.util.List)":{"name":"setContainerIds","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest","args":["java.util.List"],"exceptions":[]},"java.util.List getContainerIds()":{"name":"getContainerIds"
 ,"returnType":"java.util.List","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","methods":{"void setStartRange(long, long) throws java.lang.IllegalArgumentException":{"name":"setStartRange","returnType":"void","args":["long","long"],"exceptions":["java.lang.IllegalArgumentException"]},"java.util.Set getApplicationTags()":{"name":"getApplicationTags","returnType":"java.util.Set","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope getScope()":{"name":"getScope","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope","args":[],"exceptions":[]},"void setScope(org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope)":{"name":"setScope","returnType":"void","args":["org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope"],"exceptions":[]},"org.apache.hadoop.yarn.api.protoco
 lrecords.GetApplicationsRequest newInstance(org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance(org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope, java.util.Set, java.util.Set, java.util.Set, java.util.Set, java.util.EnumSet, org.apache.commons.lang.math.LongRange, org.apache.commons.lang.math.LongRange, java.lang.Long)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope","java.util.Set","java.util.Set","java.util.Set","java.util.Set","java.util.EnumSet","org.apache.commons.lang.math.LongRange","org.apache.commons.lang.math.LongRange","java.lang.Lo
 ng"],"exceptions":[]},"long getLimit()":{"name":"getLimit","returnType":"long","args":[],"exceptions":[]},"java.util.EnumSet getApplicationStates()":{"name":"getApplicationStates","returnType":"java.util.EnumSet","args":[],"exceptions":[]},"void setFinishRange(org.apache.commons.lang.math.LongRange)":{"name":"setFinishRange","returnType":"void","args":["org.apache.commons.lang.math.LongRange"],"exceptions":[]},"void setUsers(java.util.Set)":{"name":"setUsers","returnType":"void","args":["java.util.Set"],"exceptions":[]},"org.apache.commons.lang.math.LongRange getFinishRange()":{"name":"getFinishRange","returnType":"org.apache.commons.lang.math.LongRange","args":[],"exceptions":[]},"void setApplicationTags(java.util.Set)":{"name":"setApplicationTags","returnType":"void","args":["java.util.Set"],"exceptions":[]},"void setApplicationStates(java.util.EnumSet)":{"name":"setApplicationStates","returnType":"void","args":["java.util.EnumSet"],"exceptions":[]},"org.apache.hadoop.yarn.api.pro
 tocolrecords.GetApplicationsRequest newInstance(java.util.EnumSet)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["java.util.EnumSet"],"exceptions":[]},"java.util.Set getQueues()":{"name":"getQueues","returnType":"java.util.Set","args":[],"exceptions":[]},"java.util.Set getUsers()":{"name":"getUsers","returnType":"java.util.Set","args":[],"exceptions":[]},"void setLimit(long)":{"name":"setLimit","returnType":"void","args":["long"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance(java.util.Set, java.util.EnumSet)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["java.util.Set","java.util.EnumSet"],"exceptions":[]},"void setApplicationStates(java.util.Set)":{"name":"setApplicationStates","returnType":"void","args":["java.util.Set"],"exceptions":[]},"org.apache.commons.lang.math.LongRange getStartRange()":{"name":
 "getStartRange","returnType":"org.apache.commons.lang.math.LongRange","args":[],"exceptions":[]},"void setApplicationTypes(java.util.Set)":{"name":"setApplicationTypes","returnType":"void","args":["java.util.Set"],"exceptions":[]},"void setQueues(java.util.Set)":{"name":"setQueues","returnType":"void","args":["java.util.Set"],"exceptions":[]},"void setFinishRange(long, long)":{"name":"setFinishRange","returnType":"void","args":["long","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance(java.util.Set)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["java.util.Set"],"exceptions":[]},"void setStartRange(org.apache.commons.lang.math.LongRange)":{"name":"setStartRange","returnType":"void","args":["org.apache.commons.lang.math.LongRange"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance()":{"name":"newInstance","returnType":"org
 .apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":[],"exceptions":[]},"java.util.Set getApplicationTypes()":{"name":"getApplicationTypes","returnType":"java.util.Set","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.LocalResourceType":{"name":"org.apache.hadoop.yarn.api.records.LocalResourceType","methods":{"[Lorg.apache.hadoop.yarn.api.records.LocalResourceType; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.LocalResourceType;","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.LocalResourceType valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.LocalResourceType","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport":{"name":"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport","methods":{"long getVcoreSeconds()":{"name":"getVcoreSeconds","returnType":"long","args":[],"exceptions":[]},"i
 nt getNumUsedContainers()":{"name":"getNumUsedContainers","returnType":"int","args":[],"exceptions":[]},"long getMemorySeconds()":{"name":"getMemorySeconds","returnType":"long","args":[],"exceptions":[]},"void setMemorySeconds(long)":{"name":"setMemorySeconds","returnType":"void","args":["long"],"exceptions":[]},"void setUsedResources(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setUsedResources","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void setNeededResources(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setNeededResources","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getReservedResources()":{"name":"getReservedResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getNeededResources()":{"name":"getNeededResources","returnType":"org.apa
 che.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setNumUsedContainers(int)":{"name":"setNumUsedContainers","returnType":"void","args":["int"],"exceptions":[]},"void setNumReservedContainers(int)":{"name":"setNumReservedContainers","returnType":"void","args":["int"],"exceptions":[]},"void setVcoreSeconds(long)":{"name":"setVcoreSeconds","returnType":"void","args":["long"],"exceptions":[]},"int getNumReservedContainers()":{"name":"getNumReservedContainers","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport newInstance(int, int, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, long, long)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport","args":["int","int","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop
 .yarn.api.records.Resource","long","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getUsedResources()":{"name":"getUsedResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setReservedResources(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setReservedResources","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest","methods":{"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest newInstance(java.lang.String, boolean, boolean, boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest","args":["java.lang.String","boolean","boolean","boolean"],"excep
 tions":[]},"boolean getRecursive()":{"name":"getRecursive","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getQueueName()":{"name":"getQueueName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setIncludeChildQueues(boolean)":{"name":"setIncludeChildQueues","returnType":"void","args":["boolean"],"exceptions":[]},"boolean getIncludeApplications()":{"name":"getIncludeApplications","returnType":"boolean","args":[],"exceptions":[]},"boolean getIncludeChildQueues()":{"name":"getIncludeChildQueues","returnType":"boolean","args":[],"exceptions":[]},"void setRecursive(boolean)":{"name":"setRecursive","returnType":"void","args":["boolean"],"exceptions":[]},"void setIncludeApplications(boolean)":{"name":"setIncludeApplications","returnType":"void","args":["boolean"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","methods":{"void setIncreasedContainers(ja
 va.util.List)":{"name":"setIncreasedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setDecreasedContainers(java.util.List)":{"name":"setDecreasedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse newInstance(int, java.util.List, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.AMCommand, int, org.apache.hadoop.yarn.api.records.PreemptionMessage, java.util.List, java.util.List, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["int","java.util.List","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.AMCommand","int","org.apache.hadoop.yarn.api.records.PreemptionMessage","java.util.List","java.util.List","java.util.List"],"exceptions":[]},"void setUpdatedNodes(java.util.Li
 st)":{"name":"setUpdatedNodes","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setResponseId(int)":{"name":"setResponseId","returnType":"void","args":["int"],"exceptions":[]},"java.util.List getNMTokens()":{"name":"getNMTokens","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.List getUpdatedNodes()":{"name":"getUpdatedNodes","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.List getIncreasedContainers()":{"name":"getIncreasedContainers","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.AMCommand getAMCommand()":{"name":"getAMCommand","returnType":"org.apache.hadoop.yarn.api.records.AMCommand","args":[],"exceptions":[]},"void setNMTokens(java.util.List)":{"name":"setNMTokens","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getAMRMToken()":{"name":"getAMRMToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"e
 xceptions":[]},"void setAMCommand(org.apache.hadoop.yarn.api.records.AMCommand)":{"name":"setAMCommand","returnType":"void","args":["org.apache.hadoop.yarn.api.records.AMCommand"],"exceptions":[]},"void setAllocatedContainers(java.util.List)":{"name":"setAllocatedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"int getNumClusterNodes()":{"name":"getNumClusterNodes","returnType":"int","args":[],"exceptions":[]},"void setNumClusterNodes(int)":{"name":"setNumClusterNodes","returnType":"void","args":["int"],"exceptions":[]},"void setCompletedContainersStatuses(java.util.List)":{"name":"setCompletedContainersStatuses","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setAMRMToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setAMRMToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.PreemptionMessage getPreemptionMessage()":{"name":"getPreemptionMessage","re
 turnType":"org.apache.hadoop.yarn.api.records.PreemptionMessage","args":[],"exceptions":[]},"java.util.List getCompletedContainersStatuses()":{"name":"getCompletedContainersStatuses","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getAvailableResources()":{"name":"getAvailableResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setAvailableResources(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setAvailableResources","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse newInstance(int, java.util.List, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.AMCommand, int, org.apache.hadoop.yarn.api.records.PreemptionMessage, java.util.List, org.apache.hadoop.yarn.api.records.Token, java.util.List, java.util.List)":{"na
 me":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["int","java.util.List","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.AMCommand","int","org.apache.hadoop.yarn.api.records.PreemptionMessage","java.util.List","org.apache.hadoop.yarn.api.records.Token","java.util.List","java.util.List"],"exceptions":[]},"void setPreemptionMessage(org.apache.hadoop.yarn.api.records.PreemptionMessage)":{"name":"setPreemptionMessage","returnType":"void","args":["org.apache.hadoop.yarn.api.records.PreemptionMessage"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse newInstance(int, java.util.List, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.AMCommand, int, org.apache.hadoop.yarn.api.records.PreemptionMessage, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.pro
 tocolrecords.AllocateResponse","args":["int","java.util.List","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.AMCommand","int","org.apache.hadoop.yarn.api.records.PreemptionMessage","java.util.List"],"exceptions":[]},"java.util.List getAllocatedContainers()":{"name":"getAllocatedContainers","returnType":"java.util.List","args":[],"exceptions":[]},"int getResponseId()":{"name":"getResponseId","returnType":"int","args":[],"exceptions":[]},"java.util.List getDecreasedContainers()":{"name":"getDecreasedContainers","returnType":"java.util.List","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.LocalResourceVisibility":{"name":"org.apache.hadoop.yarn.api.records.LocalResourceVisibility","methods":{"org.apache.hadoop.yarn.api.records.LocalResourceVisibility valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.LocalResourceVisibility","args":["java.lang.String"],"excep
 tions":[]},"[Lorg.apache.hadoop.yarn.api.records.LocalResourceVisibility; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.LocalResourceVisibility;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.QueueUserACLInfo":{"name":"org.apache.hadoop.yarn.api.records.QueueUserACLInfo","methods":{"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setUserAcls(java.util.List)":{"name":"setUserAcls","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.lang.String getQueueName()":{"name":"getQueueName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.List getUserAcls()":{"name":"getUserAcls","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.QueueUserACLInfo newInstance(java.lang.String, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.QueueUserACLInfo","a
 rgs":["java.lang.String","java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest","methods":{"void setRenewer(java.lang.String)":{"name":"setRenewer","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest newInstance(java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest","args":["java.lang.String"],"exceptions":[]},"java.lang.String getRenewer()":{"name":"getRenewer","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ContainerState":{"name":"org.apache.hadoop.yarn.api.records.ContainerState","methods":{"org.apache.hadoop.yarn.api.records.ContainerState valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.ContainerState","a
 rgs":["java.lang.String"],"exceptions":[]},"[Lorg.apache.hadoop.yarn.api.records.ContainerState; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.ContainerState;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.ContainerManagementProtocol":{"name":"org.apache.hadoop.yarn.api.ContainerManagementProtocol","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse getContainerStatuses(org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainerSt
 atuses","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse startContainers(org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"startContainers","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse stopContainers(org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name
 ":"stopContainers","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.exceptions.YarnException":{"name":"org.apache.hadoop.yarn.exceptions.YarnException","methods":{}},"org.apache.hadoop.yarn.api.records.QueueInfo":{"name":"org.apache.hadoop.yarn.api.records.QueueInfo","methods":{"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setCurrentCapacity(float)":{"name":"setCurrentCapacity","returnType":"void","args":["float"],"exceptions":[]},"void setCapacity(float)":{"name":"setCapacity","returnType":"void","args":["float"],"exceptions":[]},"java.lang.String getQueueName()":{"name":"getQueueName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.List getChildQueues()":{"
 name":"getChildQueues","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.QueueInfo newInstance(java.lang.String, float, float, float, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.QueueState, java.util.Set, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.QueueInfo","args":["java.lang.String","float","float","float","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.QueueState","java.util.Set","java.lang.String"],"exceptions":[]},"void setDefaultNodeLabelExpression(java.lang.String)":{"name":"setDefaultNodeLabelExpression","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.util.List getApplications()":{"name":"getApplications","returnType":"java.util.List","args":[],"exceptions":[]},"float getCapacity()":{"name":"getCapacity","returnType":"float","args":[],"exceptions":[]},"float getCurrentCapacity()":{"name":"getCurrentCapacity","returnType
 ":"float","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.QueueState getQueueState()":{"name":"getQueueState","returnType":"org.apache.hadoop.yarn.api.records.QueueState","args":[],"exceptions":[]},"void setChildQueues(java.util.List)":{"name":"setChildQueues","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setApplications(java.util.List)":{"name":"setApplications","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.lang.String getDefaultNodeLabelExpression()":{"name":"getDefaultNodeLabelExpression","returnType":"java.lang.String","args":[],"exceptions":[]},"void setMaximumCapacity(float)":{"name":"setMaximumCapacity","returnType":"void","args":["float"],"exceptions":[]},"void setQueueState(org.apache.hadoop.yarn.api.records.QueueState)":{"name":"setQueueState","returnType":"void","args":["org.apache.hadoop.yarn.api.records.QueueState"],"exceptions":[]},"void setAccessibleNodeLabels(java.util.Set)":{"name":"setAccessibleNodeLabels
 ","returnType":"void","args":["java.util.Set"],"exceptions":[]},"float getMaximumCapacity()":{"name":"getMaximumCapacity","returnType":"float","args":[],"exceptions":[]},"java.util.Set getAccessibleNodeLabels()":{"name":"getAccessibleNodeLabels","returnType":"java.util.Set","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse","methods":{"void setRMDelegationToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setRMDelegationToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse newInstance(org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.To
 ken getRMDelegationToken()":{"name":"getRMDelegationToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.NodeId":{"name":"org.apache.hadoop.yarn.api.records.NodeId","methods":{"int compareTo(org.apache.hadoop.yarn.api.records.NodeId)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeId newInstance(java.lang.String, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.NodeId","args":["java.lang.String","int"],"exceptions":[]},"java.lang.String getHost()":{"name":"getHost","returnType":"java.lang.String","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int",
 "args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"int getPort()":{"name":"getPort","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest newInstance(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest","args":["org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext"],"exceptions":[]},"void setApplicationSubmissionContext(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext)":{"name":"setApplicationSubmissionContext","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext"],"excep
 tions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext getApplicationSubmissionContext()":{"name":"getApplicationSubmissionContext","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse newInstance(boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse","args":["boolean"],"exceptions":[]},"void setIsKillCompleted(boolean)":{"name":"setIsKillCompleted","returnType":"void","args":["boolean"],"exceptions":[]},"boolean getIsKillCompleted()":{"name":"getIsKillCompleted","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.ApplicationClientProtocol":{"name":"org.apache.hadoop.yarn.api.ApplicationClientProtocol"
 ,"methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse getNewApplication(org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getNewApplication","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse getClusterNodes(org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterNodes","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnExcept
 ion","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse updateReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"updateReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesResponse getLabelsToNodes(org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getLabelsToNodes","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesRequest"],"exceptions":["org.apache.hadoop.yar
 n.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesResponse moveApplicationAcrossQueues(org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"moveApplicationAcrossQueues","returnType":"org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse deleteReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"deleteReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse","args":["org.apache.hadoop
 .yarn.api.protocolrecords.ReservationDeleteRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse getQueueUserAcls(org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueUserAcls","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse submitApplication(org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"submitApplication","returnType":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicat
 ionResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse forceKillApplication(org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"forceKillApplication","returnType":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsResponse getNodeToLabels(org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getNodeToLabels","returnType":"org.apache.hadoop.yarn.api.protocolr
 ecords.GetNodesToLabelsResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse submitReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"submitReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse getQueueInfo(org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueInfo","returnType":"org.apache.ha
 doop.yarn.api.protocolrecords.GetQueueInfoResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse getClusterMetrics(org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterMetrics","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsResponse getClusterNodeLabels(org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterNodeLabe
 ls","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest","methods":{"void setContainerIds(java.util.List)":{"name":"setContainerIds","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest","args":["java.util.List"],"exceptions":[]},"java.util.List getContainerIds()":{"name":"getContainerIds","returnType":"java.util.List","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse
 ":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","args":["org.apache.hadoop.yarn.api.records.ApplicationId","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"org.apache.hadoop.ya
 rn.api.records.Resource getMaximumResourceCapability()":{"name":"getMaximumResourceCapability","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setMaximumResourceCapability(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setMaximumResourceCapability","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ContainerId":{"name":"org.apache.hadoop.yarn.api.records.ContainerId","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId newInstance(org.apache.hadoop.yarn.api.records.ApplicationAttemptId, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ContainerId","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId","int"],"exceptions":[]
 },"int getId()":{"name":"getId","returnType":"int","args":[],"exceptions":[]},"long getContainerId()":{"name":"getContainerId","returnType":"long","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId newContainerId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId, long)":{"name":"newContainerId","returnType":"org.apache.hadoop.yarn.api.records.ContainerId","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptId getApplicationAttemptId()":{"name":"getApplicationAttemptId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptId","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId fromString(java.lang.String)":{"name":"fromString","returnType":"org.apache.
 hadoop.yarn.api.records.ContainerId","args":["java.lang.String"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.Container":{"name":"org.apache.hadoop.yarn.api.records.Container","methods":{"org.apache.hadoop.yarn.api.records.Priority getPriority()":{"name":"getPriority","returnType":"org.apache.hadoop.yarn.api.records.Priority","args":[],"exceptions":[]},"java.lang.String getNodeHttpAddress()":{"name":"getNodeHttpAddress","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeId getNodeId()":{"name":"getNodeId","returnType":"org.apache.hadoop.yarn.api.records.NodeId","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId getId()":{"name":"getId","returnType":"org.apache.hadoop.y
 arn.api.records.ContainerId","args":[],"exceptions":[]},"void setContainerToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setContainerToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"void setNodeId(org.apache.hadoop.yarn.api.records.NodeId)":{"name":"setNodeId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getContainerToken()":{"name":"getContainerToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getResource()":{"name":"getResource","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Container newInstance(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.record
 s.Priority, org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.Container","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Priority","org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"void setPriority(org.apache.hadoop.yarn.api.records.Priority)":{"name":"setPriority","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Priority"],"exceptions":[]},"void setResource(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setResource","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void setId(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"setId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"void setNodeHttpAddress(java.lang.String)":{"name":"setNod
 eHttpAddress","returnType":"void","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ResourceRequest":{"name":"org.apache.hadoop.yarn.api.records.ResourceRequest","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceRequest newInstance(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, int, boolean, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ResourceRequest","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","int","boolean","java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Priority getPriority()":{"name":"getPriority","returnType":"org.apache.hadoop.yarn.api.records.Priority","args":[],"exceptions":[]},"void setCapability(org.apache.hadoop.yarn.api.records.Resource)":{"name":"s
 etCapability","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"int getNumContainers()":{"name":"getNumContainers","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceRequest newInstance(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ResourceRequest","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","int"],"exceptions":[]},"void setRelaxLocality(boolean)":{"name":"setRelaxLocality","returnType":"void","args":["boolean"],"exceptions":[]},"void setResourceName(java.lang.String)":{"name":"setResourceName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getCapability()":{"name":"getCapability","returnType":"org.apache.hadoop.yarn.api.records.R
 esource","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceRequest newInstance(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, int, boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ResourceRequest","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","int","boolean"],"exceptions":[]},"void setNodeLabelExpression(java.lang.String)":{"name":"setNodeLabelExpression","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getNodeLabelExpression()":{"name":"getNodeLabelExpression","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean getR
 elaxLocality()":{"name":"getRelaxLocality","returnType":"boolean","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.yarn.api.records.ResourceRequest)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ResourceRequest"],"exceptions":[]},"java.lang.String getResourceName()":{"name":"getResourceName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setPriority(org.apache.hadoop.yarn.api.records.Priority)":{"name":"setPriority","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Priority"],"exceptions":[]},"void setNumContainers(int)":{"name":"setNumContainers","returnType":"void","args":["int"],"exceptions":[]},"boolean isAnyLocation(java.lang.String)":{"name":"isAnyLocation","returnType":"boolean","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ContainerStatus":{"name":"org.apache.hadoop.yarn.api.records.ContainerStatus","methods":{"org.apache.hadoop.yarn.api.records.ContainerStat
 e getState()":{"name":"getState","returnType":"org.apache.hadoop.yarn.api.records.ContainerState","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId getContainerId()":{"name":"getContainerId","returnType":"org.apache.hadoop.yarn.api.records.ContainerId","args":[],"exceptions":[]},"int getExitStatus()":{"name":"getExitStatus","returnType":"int","args":[],"exceptions":[]},"void setExitStatus(int)":{"name":"setExitStatus","returnType":"void","args":["int"],"exceptions":[]},"void setState(org.apache.hadoop.yarn.api.records.ContainerState)":{"name":"setState","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerState"],"exceptions":[]},"void setDiagnostics(java.lang.String)":{"name":"setDiagnostics","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerStatus newInstance(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.ContainerState, java.lang.String, int
 )":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ContainerStatus","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yar

<TRUNCATED>
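
Editor's note: the JSON above is the hadoop-yarn-api 2.7.3 API report resource, enumerating the public classes and method signatures the runtime tests verify. As a minimal illustration only (not part of this commit), the following Java sketch constructs one of the protocol records listed in that report, using just the signatures the report itself documents; the class name YarnApiReportSketch is hypothetical.

// Hypothetical usage sketch, not part of this commit. It exercises
// GetApplicationsRequest signatures that appear in the report above.
import java.util.EnumSet;

import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;

public class YarnApiReportSketch {
  public static void main(String[] args) {
    // GetApplicationsRequest.newInstance(java.util.EnumSet) is captured in the
    // report; here it restricts the request to RUNNING applications.
    GetApplicationsRequest request =
        GetApplicationsRequest.newInstance(EnumSet.of(YarnApplicationState.RUNNING));
    request.setLimit(10);  // void setLimit(long), also listed in the report
    System.out.println(request.getApplicationStates());
    System.out.println(request.getLimit());
  }
}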

[39/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-bin.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-bin.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-bin.list
new file mode 100644
index 0000000..d3861b9
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-bin.list
@@ -0,0 +1,3 @@
+mapred
+yarn
+container-executor

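Editor's note: hadoop-yarn-bin.list above enumerates the YARN executables (mapred, yarn, container-executor) that the smoke tests expect the distribution to ship. The sketch below is a hedged illustration of how such a list file could be consumed, not the test code added by this commit; it assumes the file is on the test classpath and that the binaries live under $HADOOP_YARN_HOME/bin.

// Hypothetical sketch only: reads the bin list from the classpath and reports
// whether each named executable exists under an assumed bin directory.
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;

public class YarnBinListCheck {
  public static void main(String[] args) throws IOException {
    String binDir = System.getenv("HADOOP_YARN_HOME") + "/bin";  // assumed layout
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(
        YarnBinListCheck.class.getResourceAsStream("/hadoop-yarn-bin.list")))) {
      String name;
      while ((name = reader.readLine()) != null) {
        File binary = new File(binDir, name.trim());
        System.out.println(binary + (binary.exists() ? " present" : " MISSING"));
      }
    }
  }
}
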
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json
new file mode 100644
index 0000000..f62ee8e
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json
@@ -0,0 +1 @@
+{"name":"hadoop-yarn-client","version":"2.7.3","classes":{"org.apache.hadoop.yarn.client.api.YarnClient":{"name":"org.apache.hadoop.yarn.client.api.YarnClient","methods":{"java.util.List getQueueAclsInfo() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueAclsInfo","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.YarnClient createYarnClient()":{"name":"createYarnClient","returnType":"org.apache.hadoop.yarn.client.api.YarnClient","args":[],"exceptions":[]},"java.util.List getApplications(java.util.Set) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":["java.util.Set"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Map getNodeToLabels() throws org.apache.hadoop.yarn.exceptions.YarnExceptio
 n, java.io.IOException":{"name":"getNodeToLabels","returnType":"java.util.Map","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse updateReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"updateReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getAllQueues() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getAllQueues","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplicationAttempts(org.apach
 e.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttempts","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.YarnClientApplication createApplication() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"createApplication","returnType":"org.apache.hadoop.yarn.client.api.YarnClientApplication","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Set getClusterNodeLabels() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterNodeLabels","returnType":"java.util.Set","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void moveApplicationAcrossQueues(org.apache
 .hadoop.yarn.api.records.ApplicationId, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"moveApplicationAcrossQueues","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport getApplicationAttemptReport(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttemptReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.Token getRMDelegationToken(org.apache.hadoop.io.Text) throws org.apache.hadoop.yarn.exceptions.YarnException, 
 java.io.IOException":{"name":"getRMDelegationToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":["org.apache.hadoop.io.Text"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse submitReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"submitReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getContainers(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainers","returnType":"java.util.List","args":["org.apac
 he.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationReport getApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationId submitApplication(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"submitApplication","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":["org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext"],"exceptions":["org.apache.hadoop.
 yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Map getLabelsToNodes() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getLabelsToNodes","returnType":"java.util.Map","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.security.token.Token getAMRMToken(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getAMRMToken","returnType":"org.apache.hadoop.security.token.Token","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications(java.util.EnumSet) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":["java.util.EnumSet"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnExcept
 ion","java.io.IOException"]},"java.util.List getRootQueueInfos() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getRootQueueInfos","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.QueueInfo getQueueInfo(java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueInfo","returnType":"org.apache.hadoop.yarn.api.records.QueueInfo","args":["java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getChildQueueInfos(java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getChildQueueInfos","returnType":"java.util.List","args":["java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Map getLabelsToNodes(java.util
 .Set) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getLabelsToNodes","returnType":"java.util.Map","args":["java.util.Set"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse deleteReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"deleteReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.YarnClusterMetrics getYarnClusterMetrics() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getYarnClusterMetrics","returnType":"org.apache.hadoop.yarn.api.rec
 ords.YarnClusterMetrics","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getNodeReports([Lorg.apache.hadoop.yarn.api.records.NodeState;) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getNodeReports","returnType":"java.util.List","args":["[Lorg.apache.hadoop.yarn.api.records.NodeState;"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void killApplication(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"killApplication","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args
 ":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications(java.util.Set, java.util.EnumSet) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":["java.util.Set","java.util.EnumSet"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ContainerReport getContainerReport(org.apache.hadoop.yarn.api.records.ContainerId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainerReport","returnType":"org.apache.hadoop.yarn.api.records.ContainerReport","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.client.api.async.NMClientAsync":{"name":"org.apache.hadoop.yarn.client.api.async.NMClientAsync","methods":{"v
 oid setClient(org.apache.hadoop.yarn.client.api.NMClient)":{"name":"setClient","returnType":"void","args":["org.apache.hadoop.yarn.client.api.NMClient"],"exceptions":[]},"void setCallbackHandler(org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler)":{"name":"setCallbackHandler","returnType":"void","args":["org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler"],"exceptions":[]},"void getContainerStatusAsync(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId)":{"name":"getContainerStatusAsync","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"void startContainerAsync(org.apache.hadoop.yarn.api.records.Container, org.apache.hadoop.yarn.api.records.ContainerLaunchContext)":{"name":"startContainerAsync","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Container","org.apache.hadoop.yarn.api.records.ContainerLaunch
 Context"],"exceptions":[]},"void stopContainerAsync(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId)":{"name":"stopContainerAsync","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"org.apache.hadoop.yarn.client.api.NMClient getClient()":{"name":"getClient","returnType":"org.apache.hadoop.yarn.client.api.NMClient","args":[],"exceptions":[]},"org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler getCallbackHandler()":{"name":"getCallbackHandler","returnType":"org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler","args":[],"exceptions":[]},"org.apache.hadoop.yarn.client.api.async.NMClientAsync createNMClientAsync(org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler)":{"name":"createNMClientAsync","returnType":"org.apache.hadoop.yarn.client.api.async.NMClientAsync","args":["org.apache.hadoop.yarn.client.api.
 async.NMClientAsync$CallbackHandler"],"exceptions":[]}}},"org.apache.hadoop.yarn.client.api.AMRMClient":{"name":"org.apache.hadoop.yarn.client.api.AMRMClient","methods":{"org.apache.hadoop.yarn.client.api.NMTokenCache getNMTokenCache()":{"name":"getNMTokenCache","returnType":"org.apache.hadoop.yarn.client.api.NMTokenCache","args":[],"exceptions":[]},"void addContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"addContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"void updateBlacklist(java.util.List, java.util.List)":{"name":"updateBlacklist","returnType":"void","args":["java.util.List","java.util.List"],"exceptions":[]},"java.util.List getMatchingRequests(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource)":{"name":"getMatchingRequests","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.Prior
 ity","java.lang.String","org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void waitFor(com.google.common.base.Supplier) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.api.records.Resource getAvailableResources()":{"name":"getAvailableResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void waitFor(com.google.common.base.Supplier, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse registerApplicationMaster(java.lang.String, int, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"registerApplicationMaster","returnType":"org.ap
 ache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","args":["java.lang.String","int","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void unregisterApplicationMaster(org.apache.hadoop.yarn.api.records.FinalApplicationStatus, java.lang.String, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"unregisterApplicationMaster","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus","java.lang.String","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void waitFor(com.google.common.base.Supplier, int, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int","int"],"exceptions":["java.lang.InterruptedException"]},"int getClusterNodeCount()":{"name":"getClusterNodeCount","returnType":"int","args":[],
 "exceptions":[]},"void releaseAssignedContainer(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"releaseAssignedContainer","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"void setNMTokenCache(org.apache.hadoop.yarn.client.api.NMTokenCache)":{"name":"setNMTokenCache","returnType":"void","args":["org.apache.hadoop.yarn.client.api.NMTokenCache"],"exceptions":[]},"void removeContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"removeContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"org.apache.hadoop.yarn.client.api.AMRMClient createAMRMClient()":{"name":"createAMRMClient","returnType":"org.apache.hadoop.yarn.client.api.AMRMClient","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse allocate(float) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":
 "allocate","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["float"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.client.api.YarnClientApplication":{"name":"org.apache.hadoop.yarn.client.api.YarnClientApplication","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse getNewApplicationResponse()":{"name":"getNewApplicationResponse","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext getApplicationSubmissionContext()":{"name":"getApplicationSubmissionContext","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.client.api.AHSClient":{"name":"org.apache.hadoop.yarn.client.api.AHSClient","methods":{"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport g
 etApplicationAttemptReport(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttemptReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getContainers(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainers","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org
 .apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationReport getApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.AHSClient createAHSClient()":{"name":"createAHSClient","returnType":"org.apache.hadoop.yarn.client.api.AHSClient","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerReport getContainerReport(org.apache.hadoop.yarn.api.records.ContainerId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainerReport","returnType":"org.apache.hadoop.yarn.api.records.ContainerReport","args":["or
 g.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplicationAttempts(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttempts","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync":{"name":"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync","methods":{"void addContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"addContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"void updateBlacklist(java.util.List, java.util.List)":{"name":"updateBlacklist","returnType":"void","args":["java.uti
 l.List","java.util.List"],"exceptions":[]},"java.util.List getMatchingRequests(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource)":{"name":"getMatchingRequests","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void waitFor(com.google.common.base.Supplier) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync createAMRMClientAsync(int, org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler)":{"name":"createAMRMClientAsync","returnType":"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync","args":["int","org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler"],"exceptions":[]},"org.apache.hadoop.yarn.api.r
 ecords.Resource getAvailableResources()":{"name":"getAvailableResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void waitFor(com.google.common.base.Supplier, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync createAMRMClientAsync(org.apache.hadoop.yarn.client.api.AMRMClient, int, org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler)":{"name":"createAMRMClientAsync","returnType":"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync","args":["org.apache.hadoop.yarn.client.api.AMRMClient","int","org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse registerApplicationMaster(java.lang.String, int, java.lang.String) throws
  org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"registerApplicationMaster","returnType":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","args":["java.lang.String","int","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void unregisterApplicationMaster(org.apache.hadoop.yarn.api.records.FinalApplicationStatus, java.lang.String, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"unregisterApplicationMaster","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus","java.lang.String","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void waitFor(com.google.common.base.Supplier, int, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int","int"],"exceptio
 ns":["java.lang.InterruptedException"]},"int getClusterNodeCount()":{"name":"getClusterNodeCount","returnType":"int","args":[],"exceptions":[]},"void releaseAssignedContainer(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"releaseAssignedContainer","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"void removeContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"removeContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"void setHeartbeatInterval(int)":{"name":"setHeartbeatInterval","returnType":"void","args":["int"],"exceptions":[]}}},"org.apache.hadoop.yarn.client.api.NMClient":{"name":"org.apache.hadoop.yarn.client.api.NMClient","methods":{"void stopContainer(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOExc
 eption":{"name":"stopContainer","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ContainerStatus getContainerStatus(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainerStatus","returnType":"org.apache.hadoop.yarn.api.records.ContainerStatus","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.NMTokenCache getNMTokenCache()":{"name":"getNMTokenCache","returnType":"org.apache.hadoop.yarn.client.api.NMTokenCache","args":[],"exceptions":[]},"org.apache.hadoop.yarn.client.api.NMClient creat
 eNMClient()":{"name":"createNMClient","returnType":"org.apache.hadoop.yarn.client.api.NMClient","args":[],"exceptions":[]},"java.util.Map startContainer(org.apache.hadoop.yarn.api.records.Container, org.apache.hadoop.yarn.api.records.ContainerLaunchContext) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"startContainer","returnType":"java.util.Map","args":["org.apache.hadoop.yarn.api.records.Container","org.apache.hadoop.yarn.api.records.ContainerLaunchContext"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.NMClient createNMClient(java.lang.String)":{"name":"createNMClient","returnType":"org.apache.hadoop.yarn.client.api.NMClient","args":["java.lang.String"],"exceptions":[]},"void setNMTokenCache(org.apache.hadoop.yarn.client.api.NMTokenCache)":{"name":"setNMTokenCache","returnType":"void","args":["org.apache.hadoop.yarn.client.api.NMTokenCache"],"exceptions":[]},"void 
 cleanupRunningContainersOnStop(boolean)":{"name":"cleanupRunningContainersOnStop","returnType":"void","args":["boolean"],"exceptions":[]}}}}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json
new file mode 100644
index 0000000..b394bff
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json
@@ -0,0 +1 @@
+{"name":"hadoop-yarn-common","version":"2.7.3","classes":{"org.apache.hadoop.yarn.security.ContainerTokenSelector":{"name":"org.apache.hadoop.yarn.security.ContainerTokenSelector","methods":{"org.apache.hadoop.security.token.Token selectToken(org.apache.hadoop.io.Text, java.util.Collection)":{"name":"selectToken","returnType":"org.apache.hadoop.security.token.Token","args":["org.apache.hadoop.io.Text","java.util.Collection"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo":{"name":"org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getTo
 kenInfo","returnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.SchedulerSecurityInfo":{"name":"org.apache.hadoop.yarn.security.SchedulerSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getTokenInfo","returnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.util.SystemClock":{"name":"org.apache.hadoop.yarn.util.SystemClock","methods":{"long getTime()":{"name":"getTime","returnType":"long",
 "args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.security.client.RMDelegationTokenSelector":{"name":"org.apache.hadoop.yarn.security.client.RMDelegationTokenSelector","methods":{"org.apache.hadoop.security.token.Token selectToken(org.apache.hadoop.io.Text, java.util.Collection)":{"name":"selectToken","returnType":"org.apache.hadoop.security.token.Token","args":["org.apache.hadoop.io.Text","java.util.Collection"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.client.ClientRMSecurityInfo":{"name":"org.apache.hadoop.yarn.security.client.ClientRMSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getTokenInfo","r
 eturnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.admin.AdminSecurityInfo":{"name":"org.apache.hadoop.yarn.security.admin.AdminSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getTokenInfo","returnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.client.ClientRMProxy":{"name":"org.apache.hadoop.yarn.client.ClientRMProxy","methods":{"org.apache.hadoop.io.Text getRMDelegationTokenService
 (org.apache.hadoop.conf.Configuration)":{"name":"getRMDelegationTokenService","returnType":"org.apache.hadoop.io.Text","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Text getAMRMTokenService(org.apache.hadoop.conf.Configuration)":{"name":"getAMRMTokenService","returnType":"org.apache.hadoop.io.Text","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Text getTokenService(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.String, int)":{"name":"getTokenService","returnType":"org.apache.hadoop.io.Text","args":["org.apache.hadoop.conf.Configuration","java.lang.String","java.lang.String","int"],"exceptions":[]},"java.lang.Object createRMProxy(org.apache.hadoop.conf.Configuration, java.lang.Class) throws java.io.IOException":{"name":"createRMProxy","returnType":"java.lang.Object","args":["org.apache.hadoop.conf.Configuration","java.lang.Class"],"exceptions":["java.io.IOException"]}}},"org.apache.had
 oop.yarn.util.Clock":{"name":"org.apache.hadoop.yarn.util.Clock","methods":{"long getTime()":{"name":"getTime","returnType":"long","args":[],"exceptions":[]}}}}}
\ No newline at end of file
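
The *-api-report.json resources added in this patch all follow the shape produced by the ApiExaminer tool shown further down in this diff: a jar name and version plus a map of public/stable classes, each listing its method signatures with return types, argument types and declared exceptions. A minimal sketch of reading such a report with Jackson's ObjectMapper (the org.codehaus.jackson mapper that ApiExaminer also uses) follows; it is illustrative only, not part of this patch, and it simply prints per-class method counts for whatever report file it is given.

```java
import org.codehaus.jackson.map.ObjectMapper;

import java.io.File;
import java.util.Map;

public class ApiReportPeek {
  @SuppressWarnings("unchecked")
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // Top level: {"name": ..., "version": ..., "classes": {fqcn: {"methods": {signature: {...}}}}}
    Map<String, Object> report = mapper.readValue(new File(args[0]), Map.class);
    System.out.println(report.get("name") + "-" + report.get("version"));
    Map<String, Object> classes = (Map<String, Object>) report.get("classes");
    for (Map.Entry<String, Object> cls : classes.entrySet()) {
      Map<String, Object> methods =
          (Map<String, Object>) ((Map<String, Object>) cls.getValue()).get("methods");
      System.out.println("  " + cls.getKey() + ": " + methods.size() + " public stable methods");
    }
  }
}
```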

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-jar.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-jar.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-jar.list
new file mode 100644
index 0000000..26613d4
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-jar.list
@@ -0,0 +1,38 @@
+netty-3\.6\.2\.Final[\.\-_].*jar
+leveldbjni-all-1\.8[\.\-_].*jar
+jackson-core-asl-1\.9\.13[\.\-_].*jar
+jackson-xc-1\.9\.13[\.\-_].*jar
+jersey-server-1\.9[\.\-_].*jar
+stax-api-1\.0-2[\.\-_].*jar
+zookeeper-3\.4\.6[\.\-_].*jar
+guice-3\.0[\.\-_].*jar
+jaxb-impl-2\.2\.3-1[\.\-_].*jar
+zookeeper-3\.4\.6.*-tests\.jar
+jersey-client-1\.9[\.\-_].*jar
+commons-cli-1\.2[\.\-_].*jar
+log4j-1\.2\.17[\.\-_].*jar
+jackson-mapper-asl-1\.9\.13[\.\-_].*jar
+guava-11\.0\.2[\.\-_].*jar
+jetty-6\.1\.26[\.\-_].*jar
+commons-logging-1\.1\.3[\.\-_].*jar
+jersey-core-1\.9[\.\-_].*jar
+jersey-guice-1\.9[\.\-_].*jar
+commons-compress-1\.4\.1[\.\-_].*jar
+jettison-1\.1[\.\-_].*jar
+commons-collections-3\.2\.[12][\.\-_].*jar
+xz-1\.0[\.\-_].*jar
+asm-3\.2[\.\-_].*jar
+commons-codec-1\.4[\.\-_].*jar
+aopalliance-1\.0[\.\-_].*jar
+javax\.inject-1[\.\-_].*jar
+commons-lang-2\.6[\.\-_].*jar
+jetty-util-6\.1\.26[\.\-_].*jar
+jsr305-3\.0\.0[\.\-_].*jar
+protobuf-java-2\.5\.0[\.\-_].*jar
+commons-io-2\.4[\.\-_].*jar
+activation-1\.1[\.\-_].*jar
+jersey-json-1\.9[\.\-_].*jar
+jaxb-api-2\.2\.2[\.\-_].*jar
+guice-servlet-3\.0[\.\-_].*jar
+servlet-api-2\.5[\.\-_].*jar
+jackson-jaxrs-1\.9\.13[\.\-_].*jar
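
Each line in hadoop-yarn-jar.list is a regular expression rather than a literal file name, so a dependency jar matches regardless of whether its base name is followed by ".", "-" or "_" (the `[\.\-_].*` tail). The sketch below shows how such patterns can be checked against a directory listing; it is illustrative only, not part of this patch, and the listed file names are made up.

```java
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;

public class JarListCheck {
  public static void main(String[] args) {
    // Two patterns copied from the reference list above; a real check would read the whole file.
    List<String> patterns = Arrays.asList(
        "netty-3\\.6\\.2\\.Final[\\.\\-_].*jar",
        "guava-11\\.0\\.2[\\.\\-_].*jar");
    // Hypothetical contents of the YARN lib jars directory.
    List<String> present = Arrays.asList(
        "netty-3.6.2.Final.jar", "guava-11.0.2.jar", "some-extra-lib-1.0.jar");
    for (String p : patterns) {
      Pattern re = Pattern.compile(p);
      boolean found = present.stream().anyMatch(f -> re.matcher(f).matches());
      System.out.println((found ? "present: " : "MISSING: ") + p);
    }
  }
}
```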

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn.list
new file mode 100644
index 0000000..bb88005
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn.list
@@ -0,0 +1,74 @@
+hadoop-yarn-server-sharedcachemanager.*\.jar
+bin
+bin/mapred
+bin/container-executor
+bin/yarn
+sbin
+sbin/yarn-daemon\.sh
+sbin/yarn-daemons\.sh
+hadoop-yarn-registry-2\.7\.[0-9].*\.jar
+hadoop-yarn-applications-unmanaged-am-launcher-2\.7\.[0-9].*\.jar
+hadoop-yarn-common-2\.7\.[0-9].*\.jar
+hadoop-yarn-server-nodemanager.*\.jar
+hadoop-yarn-server-applicationhistoryservice-2\.7\.[0-9].*\.jar
+hadoop-yarn-server-common.*\.jar
+etc
+etc/hadoop
+hadoop-yarn-server-common-2\.7\.[0-9].*\.jar
+hadoop-yarn-server-tests.*\.jar
+hadoop-yarn-server-resourcemanager.*\.jar
+hadoop-yarn-server-web-proxy.*\.jar
+hadoop-yarn-api-2\.7\.[0-9].*\.jar
+hadoop-yarn-common.*\.jar
+hadoop-yarn-server-web-proxy-2\.7\.[0-9].*\.jar
+hadoop-yarn-applications-distributedshell-2\.7\.[0-9].*\.jar
+hadoop-yarn-server-tests-2\.7\.[0-9].*\.jar
+hadoop-yarn-server-resourcemanager-2\.7\.[0-9].*\.jar
+hadoop-yarn-registry.*\.jar
+hadoop-yarn-server-sharedcachemanager-2\.7\.[0-9].*\.jar
+hadoop-yarn-client-2\.7\.[0-9].*\.jar
+hadoop-yarn-applications-distributedshell.*\.jar
+hadoop-yarn-server-nodemanager-2\.7\.[0-9].*\.jar
+hadoop-yarn-api.*\.jar
+hadoop-yarn-client.*\.jar
+lib
+lib/commons-cli-1\.2.*\.jar
+lib/leveldbjni-all-1\.8.*\.jar
+lib/jaxb-api-2\.2\.2.*\.jar
+lib/jettison-1\.1.*\.jar
+lib/commons-io-2\.4.*\.jar
+lib/jetty-util-6\.1\.26.*\.jar
+lib/jaxb-impl-2\.2\.3-1.*\.jar
+lib/jersey-guice-1\.9.*\.jar
+lib/netty-3\.6\.2\.Final.*\.jar
+lib/jersey-core-1\.9.*\.jar
+lib/jackson-mapper-asl-1\.9\.13.*\.jar
+lib/asm-3\.2.*\.jar
+lib/commons-compress-1\.4\.1.*\.jar
+lib/aopalliance-1\.0.*\.jar
+lib/jackson-xc-1\.9\.13.*\.jar
+lib/jersey-json-1\.9.*\.jar
+lib/commons-codec-1\.4.*\.jar
+lib/jackson-core-asl-1\.9\.13.*\.jar
+lib/servlet-api-2\.5.*\.jar
+lib/jetty-6\.1\.26.*\.jar
+lib/jersey-server-1\.9.*\.jar
+lib/log4j-1\.2\.17.*\.jar
+lib/zookeeper-3\.4\.6.*-tests\.jar
+lib/stax-api-1\.0-2.*\.jar
+lib/jersey-client-1\.9.*\.jar
+lib/xz-1\.0.*\.jar
+lib/zookeeper-3\.4\.6.*\.jar
+lib/activation-1\.1.*\.jar
+lib/javax\.inject-1.*\.jar
+lib/protobuf-java-2\.5\.0.*\.jar
+lib/guice-3\.0.*\.jar
+lib/guava-11\.0\.2.*\.jar
+lib/jsr305-3\.0\.0.*\.jar
+lib/jackson-jaxrs-1\.9\.13.*\.jar
+lib/commons-collections-3\.2\.[1-2].*\.jar
+lib/commons-logging-1\.1\.3.*\.jar
+lib/commons-lang-2\.6.*\.jar
+lib/guice-servlet-3\.0.*\.jar
+hadoop-yarn-server-applicationhistoryservice.*\.jar
+hadoop-yarn-applications-unmanaged-am-launcher.*\.jar
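
hadoop-yarn.list serves the same purpose for the 'dirstruct' checks defined in testRuntimeSpecConf.groovy below: every line is a pattern that some path relative to HADOOP_YARN_HOME is expected to match. A rough sketch of that idea, not part of this patch, is shown here; the base directory and the two sample patterns are placeholders.

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class DirStructCheck {
  public static void main(String[] args) throws IOException {
    // Placeholder: the spec resolves this from `yarn envvars` via HADOOP_YARN_HOME.
    Path base = Paths.get(args.length > 0 ? args[0] : "/usr/lib/hadoop-yarn");
    List<String> relative;
    try (Stream<Path> walk = Files.walk(base)) {
      relative = walk.filter(p -> !p.equals(base))
                     .map(p -> base.relativize(p).toString())
                     .collect(Collectors.toList());
    }
    // Two patterns copied from the reference list above; a real check would read the whole file.
    for (String pat : new String[]{"bin/yarn", "hadoop-yarn-api-2\\.7\\.[0-9].*\\.jar"}) {
      boolean found = relative.stream().anyMatch(r -> r.matches(pat));
      System.out.println((found ? "present: " : "MISSING: ") + pat);
    }
  }
}
```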

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/testRuntimeSpecConf.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/testRuntimeSpecConf.groovy b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/testRuntimeSpecConf.groovy
new file mode 100644
index 0000000..339de4c
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/testRuntimeSpecConf.groovy
@@ -0,0 +1,430 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+specs {
+  tests {
+    'HADOOP_EJH1' {
+      name = 'HADOOP_EJH1'
+      type = 'envdir'
+      arguments {
+        envcmd = 'hadoop envvars'
+        variable = 'JAVA_HOME'
+      }
+    }
+    'HADOOP_EC1' {
+      name = 'HADOOP_EC1'
+      type = 'envdir'
+      arguments {
+        envcmd = 'hadoop envvars'
+        variable = 'HADOOP_TOOLS_PATH'
+        donotcheckexistance = true
+      }
+    }
+    'HADOOP_EC2' {
+      name = 'HADOOP_EC2'
+      type = 'envdir'
+      arguments {
+        envcmd = 'hadoop envvars'
+        variable = 'HADOOP_COMMON_HOME'
+      }
+    }
+    'HADOOP_EC3' {
+      name = 'HADOOP_EC3'
+      type = 'envdir'
+      arguments {
+        envcmd = 'hadoop envvars'
+        variable = 'HADOOP_COMMON_DIR'
+        relative = true
+      }
+    }
+    'HADOOP_EC4' {
+      name = 'HADOOP_EC4'
+      type = 'envdir'
+      arguments {
+        envcmd = 'hadoop envvars'
+        variable = 'HADOOP_COMMON_LIB_JARS_DIR'
+        relative = true
+      }
+    }
+    'HADOOP_EC5' {
+      name = 'HADOOP_EC5'
+      type = 'envdir'
+      arguments {
+        envcmd = 'hadoop envvars'
+        variable = 'HADOOP_CONF_DIR'
+      }
+    }
+    'HADOOP_EH1' {
+      name = 'HADOOP_EH1'
+      type = 'envdir'
+      arguments {
+        envcmd = 'hdfs envvars'
+        variable = 'HADOOP_HDFS_HOME'
+      }
+    }
+    'HADOOP_EH2' {
+      name = 'HADOOP_EH2'
+      type = 'envdir'
+      arguments {
+        envcmd = 'hdfs envvars'
+        variable = 'HDFS_DIR'
+        relative = true
+      }
+    }
+    'HADOOP_EH3' {
+      name = 'HADOOP_EH3'
+      type = 'envdir'
+      arguments {
+        envcmd = 'hdfs envvars'
+        variable = 'HDFS_LIB_JARS_DIR'
+        relative = true
+      }
+    }
+    'HADOOP_EY1' {
+      name = 'HADOOP_EY1'
+      type = 'envdir'
+      arguments {
+        envcmd = 'yarn envvars'
+        variable = 'HADOOP_YARN_HOME'
+      }
+    }
+    'HADOOP_EY2' {
+      name = 'HADOOP_EY2'
+      type = 'envdir'
+      arguments {
+        envcmd = 'yarn envvars'
+        variable = 'YARN_DIR'
+        relative = true
+      }
+    }
+    'HADOOP_EY3' {
+      name = 'HADOOP_EY3'
+      type = 'envdir'
+      arguments {
+        envcmd = 'yarn envvars'
+        variable = 'YARN_LIB_JARS_DIR'
+        relative = true
+      }
+    }
+    'HADOOP_EM1' {
+      name = 'HADOOP_EM1'
+      type = 'envdir'
+      arguments {
+        envcmd = 'mapred envvars'
+        variable = 'HADOOP_MAPRED_HOME'
+      }
+    }
+    'HADOOP_EM2' {
+      name = 'HADOOP_EM2'
+      type = 'envdir'
+      arguments {
+        envcmd = 'mapred envvars'
+        variable = 'MAPRED_DIR'
+        relative = true
+      }
+    }
+    'HADOOP_EM3' {
+      name = 'HADOOP_EM3'
+      type = 'envdir'
+      arguments {
+        envcmd = 'mapred envvars'
+        variable = 'MAPRED_LIB_JARS_DIR'
+        relative = true
+      }
+    }
+    'HADOOP_EJH2_HADOOP' {
+      name = 'HADOOP_EJH2_HADOOP'
+      type = 'shell'
+      arguments {
+        command = '[ "${JAVA_HOME}xxx" != "xxx" ] || grep -E "^\\s*export\\s+JAVA_HOME=[\\w/]+" `hadoop envvars | grep HADOOP_CONF_DIR | sed "s|[^=]\\+=\'\\([^\']\\+\\)\'$|\\1|g"`/hadoop-env.sh'
+        message = 'JAVA_HOME is not set'
+      }
+    }
+    'HADOOP_EJH2_YARN' {
+      name = 'HADOOP_EJH2_YARN'
+      type = 'shell'
+      arguments {
+        command = '[ "${JAVA_HOME}xxx" != "xxx" ] || grep -E "^\\s*export\\s+JAVA_HOME=[\\w/]+" `hadoop envvars | grep HADOOP_CONF_DIR | sed "s|[^=]\\+=\'\\([^\']\\+\\)\'$|\\1|g"`/yarn-env.sh'
+        message = 'JAVA_HOME is not set'
+      }
+    }
+    'HADOOP_PLATVER_1' {
+      name = 'HADOOP_PLATVER'
+      type = 'shell'
+      arguments {
+        command = 'hadoop version | head -n 1 | grep -E \'Hadoop\\s+[0-9\\.]+[_\\-][A-Za-z_0-9]+\''
+        message = 'Hadoop\'s version string is not correct'
+      }
+    }
+    'HADOOP_DIRSTRUCT_COMMON' {
+      name = 'HADOOP_DIRSTRUCT_COMMON'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'hadoop envvars'
+        baseDirEnv = 'HADOOP_COMMON_HOME'
+        referenceList = 'hadoop-common.list'
+      }
+    }
+    'HADOOP_DIRSTRUCT_HDFS' {
+      name = 'HADOOP_DIRSTRUCT_HDFS'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'hdfs envvars'
+        baseDirEnv = 'HADOOP_HDFS_HOME'
+        referenceList = 'hadoop-hdfs.list'
+      }
+    }
+    'HADOOP_DIRSTRUCT_MAPRED' {
+      name = 'HADOOP_DIRSTRUCT_MAPRED'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'mapred envvars'
+        baseDirEnv = 'HADOOP_MAPRED_HOME'
+        referenceList = 'hadoop-mapreduce.list'
+      }
+    }
+    'HADOOP_DIRSTRUCT_YARN' {
+      name = 'HADOOP_DIRSTRUCT_YARN'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'yarn envvars'
+        baseDirEnv = 'HADOOP_YARN_HOME'
+        referenceList = 'hadoop-yarn.list'
+      }
+    }
+    'HADOOP_SUBPROJS' {
+      name = 'HADOOP_SUBPROJS'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'hadoop envvars'
+        baseDirEnv = 'HADOOP_COMMON_HOME'
+        referenceList = 'hadoop-subprojs.list'
+      }
+    }
+    'HADOOP_BINCONTENT_COMMON' {
+      name = 'HADOOP_BINCONTENT_COMMON'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'hadoop envvars'
+        baseDirEnv = 'HADOOP_COMMON_HOME'
+        subDir = 'bin'
+        referenceList = 'hadoop-common-bin.list'
+      }
+    }
+    'HADOOP_BINCONTENT_HDFS' {
+      name = 'HADOOP_BINCONTENT_HDFS'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'hdfs envvars'
+        baseDirEnv = 'HADOOP_HDFS_HOME'
+        subDir = 'bin'
+        referenceList = 'hadoop-hdfs-bin.list'
+      }
+    }
+    'HADOOP_BINCONTENT_MAPRED' {
+      name = 'HADOOP_BINCONTENT_MAPRED'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'mapred envvars'
+        baseDirEnv = 'HADOOP_MAPRED_HOME'
+        subDir = 'bin'
+        referenceList = 'hadoop-mapreduce-bin.list'
+      }
+    }
+    'HADOOP_BINCONTENT_YARN' {
+      name = 'HADOOP_BINCONTENT_YARN'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'yarn envvars'
+        baseDirEnv = 'HADOOP_YARN_HOME'
+        subDir = 'bin'
+        referenceList = 'hadoop-yarn-bin.list'
+      }
+    }
+    'HADOOP_LIBJARSCONTENT_COMMON' {
+      name = 'HADOOP_JARCONTENT_COMMON'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'hadoop envvars'
+        baseDirEnv = 'HADOOP_COMMON_HOME'
+        subDirEnv = 'HADOOP_COMMON_LIB_JARS_DIR'
+        referenceList = 'hadoop-common-jar.list'
+      }
+    }
+    'HADOOP_LIBJARSCONTENT_HDFS' {
+      name = 'HADOOP_JARCONTENT_HDFS'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'hdfs envvars'
+        baseDirEnv = 'HADOOP_HDFS_HOME'
+        subDirEnv = 'HDFS_LIB_JARS_DIR'
+        referenceList = 'hadoop-hdfs-jar.list'
+      }
+    }
+    'HADOOP_LIBJARSCONTENT_MAPRED' {
+      name = 'HADOOP_JARCONTENT_MAPRED'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'mapred envvars'
+        baseDirEnv = 'HADOOP_MAPRED_HOME'
+        subDirEnv = 'MAPRED_LIB_JARS_DIR'
+        referenceList = 'hadoop-mapreduce-jar.list'
+      }
+    }
+    'HADOOP_LIBJARSCONTENT_YARN' {
+      name = 'HADOOP_JARCONTENT_YARN'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'yarn envvars'
+        baseDirEnv = 'HADOOP_YARN_HOME'
+        subDirEnv = 'YARN_LIB_JARS_DIR'
+        referenceList = 'hadoop-yarn-jar.list'
+      }
+    }
+    'HADOOP_GETCONF' {
+      name = 'HADOOP_GETCONF'
+      type = 'shell'
+      arguments {
+        command = '[ `hdfs getconf -confKey dfs.permissions.superusergroup >/dev/null 2>/dev/null; echo $?` == "0" ]'
+        message = 'It\'s not possible to determine key Hadoop configuration values by using ${HADOOP_HDFS_HOME}/bin/hdfs getconf'
+      }
+    }
+    'HADOOP_CNATIVE1' {
+      name = 'HADOOP_CNATIVE1'
+      type = 'shell'
+      arguments {
+        command = 'hadoop checknative -a 2>/dev/null | grep hadoop | grep true'
+        message = 'hadoop-common-project must be built with -Pnative or -Pnative-win'
+      }
+    }
+    'HADOOP_CNATIVE2' {
+      name = 'HADOOP_CNATIVE2'
+      type = 'shell'
+      arguments {
+        command = 'hadoop checknative -a 2>/dev/null | grep snappy | grep true'
+        message = 'hadoop-common-project must be built with -Prequire.snappy'
+      }
+    }
+    'HADOOP_HNATIVE1' {
+      name = 'HADOOP_HNATIVE1'
+      type = 'shell'
+      arguments {
+        command = '[ ! -n ${HADOOP_COMMON_HOME} ] || HADOOP_COMMON_HOME=`hadoop envvars | grep HADOOP_COMMON_HOME | sed "s/.*=\'\\(.*\\)\'/\\1/"`; '+
+            'test -e $HADOOP_COMMON_HOME/lib/native/libhdfs.a'
+        message = 'hadoop-hdfs-project must be built with -Pnative or -Pnative-win'
+      }
+    }
+    'HADOOP_YNATIVE1' {
+      name = 'HADOOP_YNATIVE1'
+      type = 'shell'
+      arguments {
+        command = '[ ! -n ${HADOOP_YARN_HOME} ] || HADOOP_YARN_HOME=`yarn envvars | grep HADOOP_YARN_HOME | sed "s/.*=\'\\(.*\\)\'/\\1/"`; '+
+            'echo $HADOOP_YARN_HOME; test -e $HADOOP_YARN_HOME/bin/container-executor'
+        message = 'hadoop-yarn-project must be built with -Pnative or -Pnative-win'
+      }
+    }
+    'HADOOP_MNATIVE1' {
+      name = 'HADOOP_MNATIVE1'
+      type = 'shell'
+      arguments {
+        command = 'hadoop checknative -a 2>/dev/null | grep snappy | grep true'
+        message = 'hadoop-mapreduce-project must be built with -Prequire.snappy'
+      }
+    }
+    'HADOOP_COMPRESSION' {
+      name = 'HADOOP_COMPRESSION'
+      type = 'shell'
+      arguments {
+        command = '[[ "$(hadoop checknative -a 2>/dev/null | egrep -e ^zlib -e ^snappy | sort -u | grep true | wc -l)" == 2 ]]'
+        message = 'hadoop must be built with -Dcompile.native=true'
+      }
+    }
+    'HADOOP_TOOLS' {
+      name = 'HADOOP_TOOLS'
+      type = 'hadoop_tools'
+      arguments {
+      }
+    }
+    'HADOOP_API1' {
+      name = "HADOOP_API1"
+      type = 'api_examination'
+      arguments {
+        baseDirEnv = 'HADOOP_COMMON_HOME'
+        libDir = 'HADOOP_COMMON_DIR'
+        envcmd = 'hadoop envvars'
+        jar = 'hadoop-common'
+        resourceFile = 'hadoop-common-2.7.3-api-report.json'
+      }
+    }
+    'HADOOP_API2' {
+      name = "HADOOP_API2"
+      type = 'api_examination'
+      arguments {
+        baseDirEnv = 'HADOOP_HDFS_HOME'
+        libDir = 'HDFS_DIR'
+        envcmd = 'hdfs envvars'
+        jar = 'hadoop-hdfs'
+        resourceFile = 'hadoop-hdfs-2.7.3-api-report.json'
+      }
+    }
+    'HADOOP_API3' {
+      name = "HADOOP_API3"
+      type = 'api_examination'
+      arguments {
+        baseDirEnv = 'HADOOP_YARN_HOME'
+        libDir = 'YARN_DIR'
+        envcmd = 'yarn envvars'
+        jar = 'hadoop-yarn-common'
+        resourceFile = 'hadoop-yarn-common-2.7.3-api-report.json'
+      }
+    }
+    'HADOOP_API4' {
+      name = "HADOOP_API4"
+      type = 'api_examination'
+      arguments {
+        baseDirEnv = 'HADOOP_YARN_HOME'
+        libDir = 'YARN_DIR'
+        envcmd = 'yarn envvars'
+        jar = 'hadoop-yarn-client'
+        resourceFile = 'hadoop-yarn-client-2.7.3-api-report.json'
+      }
+    }
+    'HADOOP_API5' {
+      name = "HADOOP_API5"
+      type = 'api_examination'
+      arguments {
+        baseDirEnv = 'HADOOP_YARN_HOME'
+        libDir = 'YARN_DIR'
+        envcmd = 'yarn envvars'
+        jar = 'hadoop-yarn-api'
+        resourceFile = 'hadoop-yarn-api-2.7.3-api-report.json'
+      }
+    }
+    'HADOOP_API6' {
+      name = "HADOOP_API6"
+      type = 'api_examination'
+      arguments {
+        baseDirEnv = 'HADOOP_MAPRED_HOME'
+        libDir = 'MAPRED_DIR'
+        envcmd = 'mapred envvars'
+        jar = 'hadoop-mapreduce-client-core'
+        resourceFile = 'hadoop-mapreduce-client-core-2.7.3-api-report.json'
+      }
+    }
+  }
+}
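
Most of the specs above resolve their base directory the same way: run an "envvars" command ('hadoop envvars', 'hdfs envvars', 'yarn envvars' or 'mapred envvars') and pull a single-quoted value out of its output, which is what the sed expressions in HADOOP_HNATIVE1 and HADOOP_YNATIVE1 do. A minimal Java sketch of that one step follows; it is illustrative only and not part of this patch, and the expected output format is inferred from those sed one-liners.

```java
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class EnvvarsProbe {
  public static void main(String[] args) throws Exception {
    String envcmd = "yarn envvars";          // as used by HADOOP_EY1 above
    String variable = "HADOOP_YARN_HOME";
    Process p = new ProcessBuilder("bash", "-c", envcmd)
        .redirectErrorStream(true)
        .start();
    // Expect output lines of the form VARIABLE='value', as the sed one-liners above assume.
    Pattern line = Pattern.compile(variable + "='([^']*)'");
    try (BufferedReader r = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
      String s;
      while ((s = r.readLine()) != null) {
        Matcher m = line.matcher(s);
        if (m.find()) {
          System.out.println(variable + " resolved to " + m.group(1));
        }
      }
    }
    p.waitFor();
  }
}
```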

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/README.md
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/README.md b/bigtop-tests/spec-tests/README.md
deleted file mode 100644
index 8fde997..0000000
--- a/bigtop-tests/spec-tests/README.md
+++ /dev/null
@@ -1,48 +0,0 @@
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements. See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to You under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License. You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Test suite to validate Hadoop basic specifications
-==================================================
-
-The test suite is intended to be used as a validation tool to make sure that a
-Hadoop stack derived from Apache Bigtop is still compliant with it. The
-minimalistic way of doing so would be to guarantee compatibility of the
-environment, binaries layouts, certain configuration parameters, and so on.
-
-Validation test suite for the specs is vaguely based on Apache Bigtop iTest and
-consists of two essential parts: a configuration file, communicating the 
-functional commands and expected outcome(s) of it; and the test driver to run
-the commands and compare the results.
- 
-Running the tests
-=================
-
-Tests could be executed by running the following command 
-```
-  gradle :bigtop-tests:spec-tests:runtime:test -Pspec.tests --info
-```
-=======
-consists of two essential parts: a configuration file, communicating the
-functional commands and expected outcome(s) of it; and the test driver to run
-the commands and compare the results.
-
-Running the tests
-=================
-
-Tests could be executed by running the following command
-```
-  gradle :bigtop-tests:spec-tests:runtime:test -Pspec.tests --info
-```
-

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/build.gradle
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/build.gradle b/bigtop-tests/spec-tests/build.gradle
deleted file mode 100644
index b0a6715..0000000
--- a/bigtop-tests/spec-tests/build.gradle
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-subprojects {
-  /**
-   *  Utility function for tests to use to confirm ENV Variables.
-   */
-  ext.checkEnv = { env_vars ->
-    env_vars.each() {
-      def value = System.getenv("${it}")
-      if (value == null || value == "null")
-        throw new GradleException("undeclared env variable: ${it}")
-    }
-  }
-
-  ext.groovyVersion = '1.8.0'
-  ext.hadoopVersion = '2.6.0'
-  // itest needs be greater than or equal to = 1.0.0
-  ext.itestVersion = '1.0.0' // Might need to be able to read an input for alternate version?
-  ext.BIGTOP_HOME   = rootDir
-
-  dependencies {
-    compile group: 'org.apache.bigtop.itest', name: 'itest-common', version: itestVersion, transitive: 'true'
-    //needed to avoid groovy not on classpath error.
-    testCompile group: 'org.codehaus.groovy', name: 'groovy', version: groovyVersion
-    testRuntime project(':bigtop-tests:smoke-tests:logger-test-config')
-  }
-
-  test.doFirst {
-    // TestHadoopExamples and other tests rely on BIGTOP_HOME environment
-    // variable to find some resources. Let's set it up, using ext.BIGTOP_HOME
-    environment ("BIGTOP_HOME", BIGTOP_HOME)
-  }
-
-  test << {
-    println("Now testing...");
-    //todo, add back in 'basic' after BIGTOP-1392 .
-    testLogging {
-      events "passed", "skipped", "failed"
-    }
-  }
-
-  // Let's make sure all system Properties are passed into the forked test JVM
-  tasks.withType(Test) {
-    systemProperties = System.getProperties()
-  }
-  test.dependsOn compileGroovy
-  compileGroovy.dependsOn clean
-}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/build.gradle
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/build.gradle b/bigtop-tests/spec-tests/runtime/build.gradle
deleted file mode 100644
index 97e3635..0000000
--- a/bigtop-tests/spec-tests/runtime/build.gradle
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-def junitVersion = '4.11'
-
-apply plugin: 'java'
-
-repositories {
-  maven {
-    url "http://conjars.org/repo/"
-  }
-}
-dependencies {
-  compile group: 'junit', name: 'junit', version: junitVersion, transitive: 'true'
-  compile group: 'commons-logging', name: 'commons-logging', version: '1.1.3'
-  compile group: 'org.apache.commons', name: 'commons-exec', version: '1.3'
-  compile group: 'org.apache.hive', name: 'hive-jdbc', version: '1.2.1'
-  compile group: 'org.apache.hive', name: 'hive-metastore', version: '1.2.1'
-  compile group: 'org.apache.hive', name: 'hive-common', version: '1.2.1'
-  compile group: 'org.apache.thrift', name: 'libfb303', version: '0.9.3'
-  compile group: 'org.apache.thrift', name: 'libthrift', version: '0.9.3'
-  compile group: 'org.apache.hadoop', name: 'hadoop-common', version: '2.7.2'
-  compile group: 'org.apache.hive.hcatalog', name: 'hive-hcatalog-core', version: '1.2.1'
-  testCompile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-core', version: '2.7.2'
-  compile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-jobclient', version: '2.7.2'
-  testCompile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-common', version: '2.7.2'
-  testCompile group: 'org.apache.hadoop', name: 'hadoop-hdfs', version: '2.7.2'
-  testCompile group: 'org.apache.hive', name: 'hive-exec', version: '1.2.1'
-  testCompile "junit:junit:4.11"
-  if (System.env.HADOOP_CONF_DIR) testRuntime files(System.env.HADOOP_CONF_DIR)
-}
-
-jar {
-    from {
-        (configurations.runtime).grep{it.toString() =~ /(hive|libfb303)-.*[jw]ar$/}.collect {
-              zipTree(it)
-        }
-    }
-
-    exclude 'META-INF/*.RSA', 'META-INF/*.SF','META-INF/*.DSA'
-}
-
-test {
-  // Change the default location where test data is picked up
-  systemProperty 'test.resources.dir', "${buildDir}/resources/test/"
-  systemProperty 'odpi.test.hive.hcat.job.jar', jar.archivePath
-  systemProperty 'odpi.test.hive.hcat.core.jar', (configurations.runtime).find { it.toString() =~ /hive-hcatalog-core-.*jar$/ }
-}
-test.dependsOn jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
deleted file mode 100644
index d95c010..0000000
--- a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
+++ /dev/null
@@ -1,485 +0,0 @@
-/**
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.odpi.specs.runtime.hadoop;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.GnuParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Options;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.codehaus.jackson.annotate.JsonIgnore;
-import org.codehaus.jackson.map.ObjectMapper;
-
-import java.io.File;
-import java.io.IOException;
-import java.lang.reflect.Method;
-import java.util.ArrayList;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.jar.JarEntry;
-import java.util.jar.JarFile;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**
- * A tool that generates API conformance tests for Hadoop libraries
- */
-public class ApiExaminer {
-
-  private static final Log LOG = LogFactory.getLog(ApiExaminer.class.getName());
-
-  static private Set<String> unloadableClasses;
-
-  private List<String> errors;
-  private List<String> warnings;
-
-  static {
-    unloadableClasses = new HashSet<>();
-    unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsMapping");
-    unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping");
-    unloadableClasses.add("org.apache.hadoop.io.compress.lz4.Lz4Compressor");
-    unloadableClasses.add("org.apache.hadoop.record.compiler.ant.RccTask");
-
-  }
-
-  public static void main(String[] args) {
-    Options options = new Options();
-
-    options.addOption("c", "compare", true,
-        "Compare against a spec, argument is the json file containing spec");
-    options.addOption("h", "help", false, "You're looking at it");
-    options.addOption("j", "jar", true, "Jar to examine");
-    options.addOption("p", "prepare-spec", true,
-        "Prepare the spec, argument is the directory to write the spec to");
-
-    try {
-      CommandLine cli = new GnuParser().parse(options, args);
-
-      if (cli.hasOption('h')) {
-        usage(options);
-        return;
-      }
-
-      if ((!cli.hasOption('c') && !cli.hasOption('p')) ||
-          (cli.hasOption('c') && cli.hasOption('p'))) {
-        System.err.println("You must choose either -c or -p");
-        usage(options);
-        return;
-      }
-
-      if (!cli.hasOption('j')) {
-        System.err.println("You must specify the jar to prepare or compare");
-        usage(options);
-        return;
-      }
-
-      String jar = cli.getOptionValue('j');
-      ApiExaminer examiner = new ApiExaminer();
-
-      if (cli.hasOption('c')) {
-        examiner.compareAgainstStandard(cli.getOptionValue('c'), jar);
-      } else if (cli.hasOption('p')) {
-        examiner.prepareExpected(jar, cli.getOptionValue('p'));
-      }
-    } catch (Exception e) {
-      System.err.println("Received exception while processing");
-      e.printStackTrace();
-    }
-  }
-
-  private static void usage(Options options) {
-    HelpFormatter help = new HelpFormatter();
-    help.printHelp("api-examiner", options);
-
-  }
-
-  private ApiExaminer() {
-  }
-
-  private void prepareExpected(String jarFile, String outputDir) throws IOException,
-      ClassNotFoundException {
-    JarInfo jarInfo = new JarInfo(jarFile, this);
-    jarInfo.dumpToFile(new File(outputDir));
-  }
-
-  private void compareAgainstStandard(String json, String jarFile) throws IOException,
-      ClassNotFoundException {
-    errors = new ArrayList<>();
-    warnings = new ArrayList<>();
-    JarInfo underTest = new JarInfo(jarFile, this);
-    JarInfo standard = jarInfoFromFile(new File(json));
-    standard.compareAndReport(underTest);
-
-    if (errors.size() > 0) {
-      System.err.println("Found " + errors.size() + " incompatibilities:");
-      for (String error : errors) {
-        System.err.println(error);
-      }
-    }
-
-    if (warnings.size() > 0) {
-      System.err.println("Found " + warnings.size() + " possible issues: ");
-      for (String warning : warnings) {
-        System.err.println(warning);
-      }
-    }
-
-
-  }
-
-  private JarInfo jarInfoFromFile(File inputFile) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    JarInfo jarInfo = mapper.readValue(inputFile, JarInfo.class);
-    jarInfo.patchUpClassBackPointers(this);
-    return jarInfo;
-  }
-
-  private static class JarInfo {
-    String name;
-    String version;
-    ApiExaminer container;
-    Map<String, ClassInfo> classes;
-
-    // For use by Jackson
-    public JarInfo() {
-
-    }
-
-    JarInfo(String jarFile, ApiExaminer container) throws IOException, ClassNotFoundException {
-      this.container = container;
-      LOG.info("Processing jar " + jarFile);
-      File f = new File(jarFile);
-      Pattern pattern = Pattern.compile("(hadoop-[a-z\\-]+)-([0-9]\\.[0-9]\\.[0-9]).*");
-      Matcher matcher = pattern.matcher(f.getName());
-      if (!matcher.matches()) {
-        String msg = "Unable to determine name and version from " + f.getName();
-        LOG.error(msg);
-        throw new RuntimeException(msg);
-      }
-      name = matcher.group(1);
-      version = matcher.group(2);
-      classes = new HashMap<>();
-
-      JarFile jar = new JarFile(jarFile);
-      Enumeration<JarEntry> entries = jar.entries();
-      while (entries.hasMoreElements()) {
-        String name = entries.nextElement().getName();
-        if (name.endsWith(".class")) {
-          name = name.substring(0, name.length() - 6);
-          name = name.replace('/', '.');
-          if (!unloadableClasses.contains(name)) {
-            LOG.debug("Processing class " + name);
-            Class<?> clazz = Class.forName(name);
-            if (clazz.getAnnotation(InterfaceAudience.Public.class) != null &&
-                clazz.getAnnotation(InterfaceStability.Stable.class) != null) {
-              classes.put(name, new ClassInfo(this, clazz));
-            }
-          }
-        }
-      }
-    }
-
-    public String getName() {
-      return name;
-    }
-
-    public void setName(String name) {
-      this.name = name;
-    }
-
-    public String getVersion() {
-      return version;
-    }
-
-    public void setVersion(String version) {
-      this.version = version;
-    }
-
-    public Map<String, ClassInfo> getClasses() {
-      return classes;
-    }
-
-    public void setClasses(Map<String, ClassInfo> classes) {
-      this.classes = classes;
-    }
-
-    void compareAndReport(JarInfo underTest) {
-      Set<ClassInfo> underTestClasses = new HashSet<>(underTest.classes.values());
-      for (ClassInfo classInfo : classes.values()) {
-        if (underTestClasses.contains(classInfo)) {
-          classInfo.compareAndReport(underTest.classes.get(classInfo.name));
-          underTestClasses.remove(classInfo);
-        } else {
-          container.errors.add(underTest + " does not contain class " + classInfo);
-        }
-      }
-
-      if (underTestClasses.size() > 0) {
-        for (ClassInfo extra : underTestClasses) {
-          container.warnings.add(underTest + " contains extra class " + extra);
-        }
-      }
-    }
-
-    void dumpToFile(File outputDir) throws IOException {
-      File output = new File(outputDir, name + "-" + version + "-api-report.json");
-      ObjectMapper mapper = new ObjectMapper();
-      mapper.writeValue(output, this);
-    }
-
-    void patchUpClassBackPointers(ApiExaminer container) {
-      this.container = container;
-      for (ClassInfo classInfo : classes.values()) {
-        classInfo.setJar(this);
-        classInfo.patchUpBackMethodBackPointers();
-      }
-    }
-
-    @Override
-    public boolean equals(Object other) {
-      if (!(other instanceof JarInfo)) return false;
-      JarInfo that = (JarInfo)other;
-      return name.equals(that.name) && version.equals(that.version);
-    }
-
-    @Override
-    public String toString() {
-      return name + "-" + version;
-    }
-  }
-
-  private static class ClassInfo {
-    @JsonIgnore JarInfo jar;
-    String name;
-    Map<String, MethodInfo> methods;
-
-    // For use by Jackson
-    public ClassInfo() {
-
-    }
-
-    ClassInfo(JarInfo jar, Class<?> clazz) {
-      this.jar = jar;
-      this.name = clazz.getName();
-      methods = new HashMap<>();
-
-      for (Method method : clazz.getMethods()) {
-        if (method.getDeclaringClass().equals(clazz)) {
-          LOG.debug("Processing method " + method.getName());
-          MethodInfo mi = new MethodInfo(this, method);
-          methods.put(mi.toString(), mi);
-        }
-      }
-    }
-
-    public JarInfo getJar() {
-      return jar;
-    }
-
-    public void setJar(JarInfo jar) {
-      this.jar = jar;
-    }
-
-    public String getName() {
-      return name;
-    }
-
-    public void setName(String name) {
-      this.name = name;
-    }
-
-    public Map<String, MethodInfo> getMethods() {
-      return methods;
-    }
-
-    public void setMethods(Map<String, MethodInfo> methods) {
-      this.methods = methods;
-    }
-
-    void compareAndReport(ClassInfo underTest) {
-      // Make a copy so we can remove them as we match them, making it easy to find additional ones
-      Set<MethodInfo> underTestMethods = new HashSet<>(underTest.methods.values());
-      for (MethodInfo methodInfo : methods.values()) {
-        if (underTestMethods.contains(methodInfo)) {
-          methodInfo.compareAndReport(underTest.methods.get(methodInfo.toString()));
-          underTestMethods.remove(methodInfo);
-        } else {
-          jar.container.errors.add(underTest + " does not contain method " + methodInfo);
-        }
-      }
-
-      if (underTestMethods.size() > 0) {
-        for (MethodInfo extra : underTestMethods) {
-          jar.container.warnings.add(underTest + " contains extra method " + extra);
-        }
-      }
-    }
-
-    void patchUpBackMethodBackPointers() {
-      for (MethodInfo methodInfo : methods.values()) methodInfo.setContainingClass(this);
-    }
-
-    @Override
-    public boolean equals(Object other) {
-      if (!(other instanceof ClassInfo)) return false;
-      ClassInfo that = (ClassInfo)other;
-      return name.equals(that.name);  // Classes can be compared just on names
-    }
-
-    @Override
-    public int hashCode() {
-      return name.hashCode();
-    }
-
-    @Override
-    public String toString() {
-      return jar + " " + name;
-    }
-  }
-
-  private static class MethodInfo {
-    @JsonIgnore ClassInfo containingClass;
-    String name;
-    String returnType;
-    List<String> args;
-    Set<String> exceptions;
-
-    // For use by Jackson
-    public MethodInfo() {
-
-    }
-
-    MethodInfo(ClassInfo containingClass, Method method) {
-      this.containingClass = containingClass;
-      this.name = method.getName();
-      args = new ArrayList<>();
-      for (Class<?> argClass : method.getParameterTypes()) {
-        args.add(argClass.getName());
-      }
-      returnType = method.getReturnType().getName();
-      exceptions = new HashSet<>();
-      for (Class<?> exception : method.getExceptionTypes()) {
-        exceptions.add(exception.getName());
-      }
-    }
-
-    public ClassInfo getContainingClass() {
-      return containingClass;
-    }
-
-    public void setContainingClass(ClassInfo containingClass) {
-      this.containingClass = containingClass;
-    }
-
-    public String getName() {
-      return name;
-    }
-
-    public void setName(String name) {
-      this.name = name;
-    }
-
-    public String getReturnType() {
-      return returnType;
-    }
-
-    public void setReturnType(String returnType) {
-      this.returnType = returnType;
-    }
-
-    public List<String> getArgs() {
-      return args;
-    }
-
-    public void setArgs(List<String> args) {
-      this.args = args;
-    }
-
-    public Set<String> getExceptions() {
-      return exceptions;
-    }
-
-    public void setExceptions(Set<String> exceptions) {
-      this.exceptions = exceptions;
-    }
-
-    void compareAndReport(MethodInfo underTest) {
-      // Check to see if they've added or removed exceptions
-      // Make a copy so I can remove them as I check them off and easily find any that have been
-      // added.
-      Set<String> underTestExceptions = new HashSet<>(underTest.exceptions);
-      for (String exception : exceptions) {
-        if (underTest.exceptions.contains(exception)) {
-          underTestExceptions.remove(exception);
-        } else {
-          containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
-              underTest.containingClass + "." + name + " removes exception " + exception);
-        }
-      }
-      if (underTestExceptions.size() > 0) {
-        for (String underTestException : underTestExceptions) {
-          containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
-              underTest.containingClass + "." + name + " adds exception " + underTestException);
-        }
-      }
-    }
-
-    @Override
-    public boolean equals(Object other) {
-      if (!(other instanceof MethodInfo)) return false;
-      MethodInfo that = (MethodInfo)other;
-
-      return containingClass.equals(that.containingClass) && name.equals(that.name) &&
-          returnType.equals(that.returnType) && args.equals(that.args);
-    }
-
-    @Override
-    public int hashCode() {
-      return ((containingClass.hashCode() * 31 + name.hashCode()) * 31 + returnType.hashCode()) * 31 +
-          args.hashCode();
-    }
-
-    @Override
-    public String toString() {
-      StringBuilder buf = new StringBuilder(returnType)
-          .append(" ")
-          .append(name)
-          .append('(');
-      boolean first = true;
-      for (String arg : args) {
-        if (first) first = false;
-        else buf.append(", ");
-        buf.append(arg);
-      }
-      buf.append(")");
-      if (exceptions.size() > 0) {
-        buf.append(" throws ");
-        first = true;
-        for (String exception : exceptions) {
-          if (first) first = false;
-          else buf.append(", ");
-          buf.append(exception);
-        }
-      }
-      return buf.toString();
-    }
-  }
-}
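
The ApiExaminer helper removed above records public, stable classes as JarInfo/ClassInfo/MethodInfo beans, serializes each report to <name>-<version>-api-report.json via dumpToFile(), and later reads a stored report back to drive compareAndReport(). A minimal sketch of that JSON round-trip, assuming Jackson 2.x databind (the ObjectMapper import sits outside this hunk, so the exact Jackson flavour is an assumption) and reading the report generically rather than through the private nested beans:

import java.io.File;
import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ApiReportRoundTripSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // One of the checked-in reports; the path here is illustrative.
    File report = new File("hadoop-common-2.7.3-api-report.json");
    // readValue() parses the whole report; dumpToFile() above is writeValue() in the other direction.
    Map<?, ?> parsed = mapper.readValue(report, Map.class);
    System.out.println("name=" + parsed.get("name") + ", version=" + parsed.get("version"));
    mapper.writeValue(new File("api-report-copy.json"), parsed);
  }
}

Reading into a plain Map keeps the sketch independent of the JarInfo/ClassInfo beans, which are private nested classes of ApiExaminer.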

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
deleted file mode 100644
index 4110d5d..0000000
--- a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.odpi.specs.runtime.hive;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.util.GenericOptionsParser;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.hive.hcatalog.data.DefaultHCatRecord;
-import org.apache.hive.hcatalog.data.HCatRecord;
-import org.apache.hive.hcatalog.data.schema.HCatSchema;
-import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils;
-import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
-
-import java.io.IOException;
-import java.net.URI;
-import java.util.StringTokenizer;
-
-public class HCatalogMR extends Configured implements Tool {
-  private final static String INPUT_SCHEMA = "odpi.test.hcat.schema.input";
-  private final static String OUTPUT_SCHEMA = "odpi.test.hcat.schema.output";
-
-  @Override
-  public int run(String[] args) throws Exception {
-    String inputTable = null;
-    String outputTable = null;
-    String inputSchemaStr = null;
-    String outputSchemaStr = null;
-    for(int i = 0; i < args.length; i++){
-        if(args[i].equalsIgnoreCase("-it")){
-            inputTable = args[i+1];
-        }else if(args[i].equalsIgnoreCase("-ot")){
-            outputTable = args[i+1];
-        }else if(args[i].equalsIgnoreCase("-is")){
-            inputSchemaStr = args[i+1];
-        }else if(args[i].equalsIgnoreCase("-os")){
-            outputSchemaStr = args[i+1];
-        }
-    }
-    
-    Configuration conf = getConf();
-    args = new GenericOptionsParser(conf, args).getRemainingArgs();
-
-    conf.set(INPUT_SCHEMA, inputSchemaStr);
-    conf.set(OUTPUT_SCHEMA, outputSchemaStr);
-
-    Job job = new Job(conf, "odpi_hcat_test");
-    HCatInputFormat.setInput(job, "default", inputTable);
-
-    job.setInputFormatClass(HCatInputFormat.class);
-    job.setJarByClass(HCatalogMR.class);
-    job.setMapperClass(Map.class);
-    job.setReducerClass(Reduce.class);
-    job.setMapOutputKeyClass(Text.class);
-    job.setMapOutputValueClass(IntWritable.class);
-    job.setOutputKeyClass(WritableComparable.class);
-    job.setOutputValueClass(HCatRecord.class);
-    HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", outputTable, null));
-    HCatOutputFormat.setSchema(job, HCatSchemaUtils.getHCatSchema(outputSchemaStr));
-    job.setOutputFormatClass(HCatOutputFormat.class);
-
-    return job.waitForCompletion(true) ? 0 : 1;
-
-
-  }
-  public static class Map extends Mapper<WritableComparable,
-          HCatRecord, Text, IntWritable> {
-    private final static IntWritable one = new IntWritable(1);
-    private Text word = new Text();
-    private HCatSchema inputSchema = null;
-
-    @Override
-    protected void map(WritableComparable key, HCatRecord value, Context context)
-        throws IOException, InterruptedException {
-      if (inputSchema == null) {
-        inputSchema =
-            HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(INPUT_SCHEMA));
-      }
-      String line = value.getString("line", inputSchema);
-      StringTokenizer tokenizer = new StringTokenizer(line);
-      while (tokenizer.hasMoreTokens()) {
-        word.set(tokenizer.nextToken());
-        context.write(word, one);
-      }
-    }
-  }
-
-  public static class Reduce extends Reducer<Text, IntWritable, WritableComparable, HCatRecord> {
-    private HCatSchema outputSchema = null;
-
-    @Override
-    protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws
-        IOException, InterruptedException {
-      if (outputSchema == null) {
-        outputSchema =
-            HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(OUTPUT_SCHEMA));
-      }
-      int sum = 0;
-      for (IntWritable i : values) {
-        sum += i.get();
-      }
-      HCatRecord output = new DefaultHCatRecord(2);
-      output.set("word", outputSchema, key);
-      output.set("count", outputSchema, sum);
-      context.write(null, output);
-    }
-  }
-
-  public static void main(String[] args) throws Exception {
-    int exitCode = ToolRunner.run(new HCatalogMR(), args);
-    System.exit(exitCode);
-  }
- }
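
HCatalogMR, also removed above, is a word count over HCatalog tables: main() hands the -it/-ot/-is/-os flags to run() through ToolRunner, and the Map/Reduce classes parse the schema strings with HCatSchemaUtils.getHCatSchema(). A minimal launch sketch under those flags; the table names are hypothetical and the colname:type schema syntax is an assumption about what getHCatSchema() accepts, though the column names line, word and count come from the job itself:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ToolRunner;
import org.odpi.specs.runtime.hive.HCatalogMR;

public class HCatalogMRLaunchSketch {
  public static void main(String[] ignored) throws Exception {
    String[] args = {
        "-it", "odpi_wordcount_in",        // hypothetical input table with a "line" column
        "-ot", "odpi_wordcount_out",       // hypothetical output table with "word"/"count" columns
        "-is", "line:string",              // input schema string read by the mappers
        "-os", "word:string,count:int"     // output schema string read by the reducers
    };
    // ToolRunner also wires generic -D/-conf options into the job Configuration, as in main() above.
    int rc = ToolRunner.run(new Configuration(), new HCatalogMR(), args);
    System.exit(rc);
  }
}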

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh b/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh
deleted file mode 100755
index 8c9ab5e..0000000
--- a/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env bash
-
-############################################################################
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-############################################################################
-
-############################################################################
-# This script is used to generate the hadoop-*-api.report.json files in the
-# test/resources directory.  To use it, you will first need to download an
-# Apache binary distribution of Hadoop and set APACHE_HADOOP_DIR to the
-# directory where you untar that distribution.  You will then need to set
-# BIGTOP_HOME to the directory where your bigtop source is located.  Then
-# run this script for each of the jars you want to generate a report for.
-# The arguments passed to this script should be -p <outputdir> -j <jarfile>
-# where outputdir is the directory you'd like to write the report to and
-# jarfile is the full path of the jar to generate the report for.  Reports
-# should be generated for the following jars: hadoop-common, hadoop-hdfs,
-# hadoop-yarn-common, hadoop-yarn-client, hadoop-yarn-api, and
-# hadoop-mapreduce-client-core
-#
-# Example usage:
-# export APACHE_HADOOP_DIR=/tmp/hadoop-2.7.3
-# export BIGTOP_HOME=/home/me/git/bigtop
-# $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner.sh -j $HADOOP_HOME/share/hadoop/common/hadoop-common-2.7.3.jar -p $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/src/test/resources
-#
-# The resulting reports should be committed to git.  This script only needs
-# to be run once per ODPi release.
-############################################################################
-
-
-if [ "x${APACHE_HADOOP_DIR}" = "x" ]
-then
-    echo "You must set APACHE_HADOOP_DIR to the directory you have placed the Apache Hadoop binary distribution in"
-    exit 1
-fi
-
-if [ "x${BIGTOP_HOME}" = "x" ]
-then
-    echo "You must set BIGTOP_HOME to the root directory for your bigtop source"
-    exit 1
-fi
-
-for jar in `find $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/build/libs/ -name \*.jar`
-do
-    CLASSPATH=$CLASSPATH:$jar
-done
-
-for jar in `find $APACHE_HADOOP_DIR -name \*.jar`
-do
-    CLASSPATH=$CLASSPATH:$jar
-done
-
-java -cp $CLASSPATH org.odpi.specs.runtime.hadoop.ApiExaminer $@
-


[41/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-jar.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-jar.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-jar.list
new file mode 100644
index 0000000..0316787
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-jar.list
@@ -0,0 +1,22 @@
+netty-3\.6\.2\.Final[\.\-_].*jar
+leveldbjni-all-1\.8[\.\-_].*jar
+paranamer-2\.3[\.\-_].*jar
+jackson-core-asl-1\.9\.13[\.\-_].*jar
+jersey-server-1\.9[\.\-_].*jar
+guice-3\.0[\.\-_].*jar
+avro-1\.7\.[4-7][\.\-_].*jar
+log4j-1\.2\.17[\.\-_].*jar
+jackson-mapper-asl-1\.9\.13[\.\-_].*jar
+snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar
+jersey-core-1\.9[\.\-_].*jar
+jersey-guice-1\.9[\.\-_].*jar
+commons-compress-1\.4\.1[\.\-_].*jar
+junit-4\.11[\.\-_].*jar
+xz-1\.0[\.\-_].*jar
+asm-3\.2[\.\-_].*jar
+aopalliance-1\.0[\.\-_].*jar
+javax\.inject-1[\.\-_].*jar
+protobuf-java-2\.5\.0[\.\-_].*jar
+commons-io-2\.4[\.\-_].*jar
+hamcrest-core-1\.3[\.\-_].*jar
+guice-servlet-3\.0[\.\-_].*jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce.list
new file mode 100644
index 0000000..cc06d80
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce.list
@@ -0,0 +1,123 @@
+hadoop-mapreduce-client-core[\.\-_].*jar
+bin
+bin/mapred
+sbin
+sbin/mr-jobhistory-daemon\.sh
+hadoop-mapreduce-client-common-2\.7\.[0-9][\.\-_].*jar
+commons-digester-1\.8[\.\-_].*jar
+curator-client-2\.7\.1[\.\-_].*jar
+commons-beanutils(-core)?-1\.[78]\.0[\.\-_].*jar
+jsp-api-2\.1[\.\-_].*jar
+jets3t-0\.9\.0[\.\-_].*jar
+hadoop-sls-2\.7\.[0-9][\.\-_].*jar
+jackson-core-2\.2\.3[\.\-_].*jar
+hadoop-mapreduce-client-hs-2\.7\.[0-9][\.\-_].*jar
+hadoop-mapreduce-client-jobclient-2\.7\.[0-9].*-tests\.jar
+hadoop-distcp[\.\-_].*jar
+jaxb-api-2\.2\.2[\.\-_].*jar
+api-util-1\.0\.0-M20[\.\-_].*jar
+jettison-1\.1[\.\-_].*jar
+commons-lang3-3\.3\.2[\.\-_].*jar
+curator-framework-2\.7\.1[\.\-_].*jar
+commons-io-2\.4[\.\-_].*jar
+hadoop-mapreduce-client-hs-plugins[\.\-_].*jar
+metrics-core-3\.0\.1[\.\-_].*jar
+hadoop-mapreduce-client-app[\.\-_].*jar
+jetty-util-6\.1\.26[\.\-_].*jar
+avro-1\.7\.[4-7][\.\-_].*jar
+jaxb-impl-2\.2\.3-1[\.\-_].*jar
+hadoop-mapreduce-client-hs[\.\-_].*jar
+hadoop-mapreduce-client-hs-plugins-2\.7\.[0-9][\.\-_].*jar
+hadoop-sls[\.\-_].*jar
+hadoop-ant[\.\-_].*jar
+netty-3\.6\.2\.Final[\.\-_].*jar
+httpcore-4\.[0-9]\.[0-9][\.\-_].*jar
+jsch-0\.1\.(4[2-9]|[5-9]\d)[\.\-_].*jar
+hadoop-mapreduce-client-jobclient[\.\-_].*jar
+hadoop-archives[\.\-_].*jar
+jersey-core-1\.9[\.\-_].*jar
+jackson-mapper-asl-1\.9\.13[\.\-_].*jar
+hadoop-mapreduce-examples-2\.7\.[0-9][\.\-_].*jar
+hadoop-mapreduce-client-shuffle-2\.7\.[0-9][\.\-_].*jar
+snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar
+gson-2\.2\.4[\.\-_].*jar
+hadoop-gridmix[\.\-_].*jar
+commons-net-3\.1[\.\-_].*jar
+asm-3\.2[\.\-_].*jar
+commons-compress-1\.4\.1[\.\-_].*jar
+mockito-all-1\.8\.5[\.\-_].*jar
+hadoop-openstack[\.\-_].*jar
+jackson-xc-1\.9\.13[\.\-_].*jar
+junit-4\.11[\.\-_].*jar
+jersey-json-1\.9[\.\-_].*jar
+hadoop-distcp-2\.7\.[0-9][\.\-_].*jar
+xmlenc-0\.52[\.\-_].*jar
+api-asn1-api-1\.0\.0-M20[\.\-_].*jar
+commons-codec-1\.4[\.\-_].*jar
+jackson-core-asl-1\.9\.13[\.\-_].*jar
+servlet-api-2\.5[\.\-_].*jar
+paranamer-2\.3[\.\-_].*jar
+hadoop-datajoin-2\.7\.[0-9][\.\-_].*jar
+jetty-6\.1\.26[\.\-_].*jar
+jersey-server-1\.9[\.\-_].*jar
+hadoop-extras-2\.7\.[0-9][\.\-_].*jar
+hadoop-mapreduce-client-shuffle[\.\-_].*jar
+apacheds-i18n-2\.0\.0-M15[\.\-_].*jar
+hadoop-auth-2\.7\.[0-9][\.\-_].*jar
+hadoop-streaming-2\.7\.[0-9][\.\-_].*jar
+hadoop-gridmix-2\.7\.[0-9][\.\-_].*jar
+commons-math3-3\.1\.1[\.\-_].*jar
+hadoop-auth[\.\-_].*jar
+log4j-1\.2\.17[\.\-_].*jar
+hamcrest-core-1\.3[\.\-_].*jar
+hadoop-mapreduce-examples[\.\-_].*jar
+hadoop-extras[\.\-_].*jar
+stax-api-1\.0-2[\.\-_].*jar
+hadoop-mapreduce-client-common[\.\-_].*jar
+xz-1\.0[\.\-_].*jar
+zookeeper-3\.4\.6[\.\-_].*jar
+hadoop-archives-2\.7\.[0-9][\.\-_].*jar
+activation-1\.1[\.\-_].*jar
+hadoop-mapreduce-client-jobclient-2\.7\.[0-9][\.\-_].*jar
+htrace-core-3\.1\.0-incubating[\.\-_].*jar
+protobuf-java-2\.5\.0[\.\-_].*jar
+hadoop-mapreduce-client-app-2\.7\.[0-9][\.\-_].*jar
+hadoop-datajoin[\.\-_].*jar
+apacheds-kerberos-codec-2\.0\.0-M15[\.\-_].*jar
+java-xmlbuilder-0\.4[\.\-_].*jar
+httpclient-4\.[0-9]\.[0-9][\.\-_].*jar
+hadoop-rumen-2\.7\.[0-9][\.\-_].*jar
+hadoop-mapreduce-client-core-2\.7\.[0-9][\.\-_].*jar
+guava-11\.0\.2[\.\-_].*jar
+jsr305-3\.0\.0[\.\-_].*jar
+hadoop-streaming[\.\-_].*jar
+hadoop-rumen[\.\-_].*jar
+jackson-jaxrs-1\.9\.13[\.\-_].*jar
+lib
+lib/leveldbjni-all-1\.8[\.\-_].*jar
+lib/commons-io-2\.4[\.\-_].*jar
+lib/avro-1\.7\.[4-7][\.\-_].*jar
+lib/jersey-guice-1\.9[\.\-_].*jar
+lib/netty-3\.6\.2\.Final[\.\-_].*jar
+lib/jersey-core-1\.9[\.\-_].*jar
+lib/jackson-mapper-asl-1\.9\.13[\.\-_].*jar
+lib/snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar
+lib/asm-3\.2[\.\-_].*jar
+lib/commons-compress-1\.4\.1[\.\-_].*jar
+lib/aopalliance-1\.0[\.\-_].*jar
+lib/junit-4\.11[\.\-_].*jar
+lib/jackson-core-asl-1\.9\.13[\.\-_].*jar
+lib/paranamer-2\.3[\.\-_].*jar
+lib/jersey-server-1\.9[\.\-_].*jar
+lib/log4j-1\.2\.17[\.\-_].*jar
+lib/hamcrest-core-1\.3[\.\-_].*jar
+lib/xz-1\.0[\.\-_].*jar
+lib/javax\.inject-1[\.\-_].*jar
+lib/protobuf-java-2\.5\.0[\.\-_].*jar
+lib/guice-3\.0[\.\-_].*jar
+lib/guice-servlet-3\.0[\.\-_].*jar
+hadoop-openstack-2\.7\.[0-9][\.\-_].*jar
+commons-httpclient-3\.1[\.\-_].*jar
+commons-collections-3\.2\.[12][\.\-_].*jar
+commons-logging-1\.1\.3[\.\-_].*jar
+commons-lang-2\.6[\.\-_].*jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-subprojs.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-subprojs.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-subprojs.list
new file mode 100644
index 0000000..b0a5654
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-subprojs.list
@@ -0,0 +1,4 @@
+hadoop-annotations\.jar
+hadoop-auth\.jar
+hadoop-common\.jar
+hadoop-nfs\.jar


[33/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-jar.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-jar.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-jar.list
deleted file mode 100644
index 0316787..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-jar.list
+++ /dev/null
@@ -1,22 +0,0 @@
-netty-3\.6\.2\.Final[\.\-_].*jar
-leveldbjni-all-1\.8[\.\-_].*jar
-paranamer-2\.3[\.\-_].*jar
-jackson-core-asl-1\.9\.13[\.\-_].*jar
-jersey-server-1\.9[\.\-_].*jar
-guice-3\.0[\.\-_].*jar
-avro-1\.7\.[4-7][\.\-_].*jar
-log4j-1\.2\.17[\.\-_].*jar
-jackson-mapper-asl-1\.9\.13[\.\-_].*jar
-snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar
-jersey-core-1\.9[\.\-_].*jar
-jersey-guice-1\.9[\.\-_].*jar
-commons-compress-1\.4\.1[\.\-_].*jar
-junit-4\.11[\.\-_].*jar
-xz-1\.0[\.\-_].*jar
-asm-3\.2[\.\-_].*jar
-aopalliance-1\.0[\.\-_].*jar
-javax\.inject-1[\.\-_].*jar
-protobuf-java-2\.5\.0[\.\-_].*jar
-commons-io-2\.4[\.\-_].*jar
-hamcrest-core-1\.3[\.\-_].*jar
-guice-servlet-3\.0[\.\-_].*jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce.list
deleted file mode 100644
index cc06d80..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce.list
+++ /dev/null
@@ -1,123 +0,0 @@
-hadoop-mapreduce-client-core[\.\-_].*jar
-bin
-bin/mapred
-sbin
-sbin/mr-jobhistory-daemon\.sh
-hadoop-mapreduce-client-common-2\.7\.[0-9][\.\-_].*jar
-commons-digester-1\.8[\.\-_].*jar
-curator-client-2\.7\.1[\.\-_].*jar
-commons-beanutils(-core)?-1\.[78]\.0[\.\-_].*jar
-jsp-api-2\.1[\.\-_].*jar
-jets3t-0\.9\.0[\.\-_].*jar
-hadoop-sls-2\.7\.[0-9][\.\-_].*jar
-jackson-core-2\.2\.3[\.\-_].*jar
-hadoop-mapreduce-client-hs-2\.7\.[0-9][\.\-_].*jar
-hadoop-mapreduce-client-jobclient-2\.7\.[0-9].*-tests\.jar
-hadoop-distcp[\.\-_].*jar
-jaxb-api-2\.2\.2[\.\-_].*jar
-api-util-1\.0\.0-M20[\.\-_].*jar
-jettison-1\.1[\.\-_].*jar
-commons-lang3-3\.3\.2[\.\-_].*jar
-curator-framework-2\.7\.1[\.\-_].*jar
-commons-io-2\.4[\.\-_].*jar
-hadoop-mapreduce-client-hs-plugins[\.\-_].*jar
-metrics-core-3\.0\.1[\.\-_].*jar
-hadoop-mapreduce-client-app[\.\-_].*jar
-jetty-util-6\.1\.26[\.\-_].*jar
-avro-1\.7\.[4-7][\.\-_].*jar
-jaxb-impl-2\.2\.3-1[\.\-_].*jar
-hadoop-mapreduce-client-hs[\.\-_].*jar
-hadoop-mapreduce-client-hs-plugins-2\.7\.[0-9][\.\-_].*jar
-hadoop-sls[\.\-_].*jar
-hadoop-ant[\.\-_].*jar
-netty-3\.6\.2\.Final[\.\-_].*jar
-httpcore-4\.[0-9]\.[0-9][\.\-_].*jar
-jsch-0\.1\.(4[2-9]|[5-9]\d)[\.\-_].*jar
-hadoop-mapreduce-client-jobclient[\.\-_].*jar
-hadoop-archives[\.\-_].*jar
-jersey-core-1\.9[\.\-_].*jar
-jackson-mapper-asl-1\.9\.13[\.\-_].*jar
-hadoop-mapreduce-examples-2\.7\.[0-9][\.\-_].*jar
-hadoop-mapreduce-client-shuffle-2\.7\.[0-9][\.\-_].*jar
-snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar
-gson-2\.2\.4[\.\-_].*jar
-hadoop-gridmix[\.\-_].*jar
-commons-net-3\.1[\.\-_].*jar
-asm-3\.2[\.\-_].*jar
-commons-compress-1\.4\.1[\.\-_].*jar
-mockito-all-1\.8\.5[\.\-_].*jar
-hadoop-openstack[\.\-_].*jar
-jackson-xc-1\.9\.13[\.\-_].*jar
-junit-4\.11[\.\-_].*jar
-jersey-json-1\.9[\.\-_].*jar
-hadoop-distcp-2\.7\.[0-9][\.\-_].*jar
-xmlenc-0\.52[\.\-_].*jar
-api-asn1-api-1\.0\.0-M20[\.\-_].*jar
-commons-codec-1\.4[\.\-_].*jar
-jackson-core-asl-1\.9\.13[\.\-_].*jar
-servlet-api-2\.5[\.\-_].*jar
-paranamer-2\.3[\.\-_].*jar
-hadoop-datajoin-2\.7\.[0-9][\.\-_].*jar
-jetty-6\.1\.26[\.\-_].*jar
-jersey-server-1\.9[\.\-_].*jar
-hadoop-extras-2\.7\.[0-9][\.\-_].*jar
-hadoop-mapreduce-client-shuffle[\.\-_].*jar
-apacheds-i18n-2\.0\.0-M15[\.\-_].*jar
-hadoop-auth-2\.7\.[0-9][\.\-_].*jar
-hadoop-streaming-2\.7\.[0-9][\.\-_].*jar
-hadoop-gridmix-2\.7\.[0-9][\.\-_].*jar
-commons-math3-3\.1\.1[\.\-_].*jar
-hadoop-auth[\.\-_].*jar
-log4j-1\.2\.17[\.\-_].*jar
-hamcrest-core-1\.3[\.\-_].*jar
-hadoop-mapreduce-examples[\.\-_].*jar
-hadoop-extras[\.\-_].*jar
-stax-api-1\.0-2[\.\-_].*jar
-hadoop-mapreduce-client-common[\.\-_].*jar
-xz-1\.0[\.\-_].*jar
-zookeeper-3\.4\.6[\.\-_].*jar
-hadoop-archives-2\.7\.[0-9][\.\-_].*jar
-activation-1\.1[\.\-_].*jar
-hadoop-mapreduce-client-jobclient-2\.7\.[0-9][\.\-_].*jar
-htrace-core-3\.1\.0-incubating[\.\-_].*jar
-protobuf-java-2\.5\.0[\.\-_].*jar
-hadoop-mapreduce-client-app-2\.7\.[0-9][\.\-_].*jar
-hadoop-datajoin[\.\-_].*jar
-apacheds-kerberos-codec-2\.0\.0-M15[\.\-_].*jar
-java-xmlbuilder-0\.4[\.\-_].*jar
-httpclient-4\.[0-9]\.[0-9][\.\-_].*jar
-hadoop-rumen-2\.7\.[0-9][\.\-_].*jar
-hadoop-mapreduce-client-core-2\.7\.[0-9][\.\-_].*jar
-guava-11\.0\.2[\.\-_].*jar
-jsr305-3\.0\.0[\.\-_].*jar
-hadoop-streaming[\.\-_].*jar
-hadoop-rumen[\.\-_].*jar
-jackson-jaxrs-1\.9\.13[\.\-_].*jar
-lib
-lib/leveldbjni-all-1\.8[\.\-_].*jar
-lib/commons-io-2\.4[\.\-_].*jar
-lib/avro-1\.7\.[4-7][\.\-_].*jar
-lib/jersey-guice-1\.9[\.\-_].*jar
-lib/netty-3\.6\.2\.Final[\.\-_].*jar
-lib/jersey-core-1\.9[\.\-_].*jar
-lib/jackson-mapper-asl-1\.9\.13[\.\-_].*jar
-lib/snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar
-lib/asm-3\.2[\.\-_].*jar
-lib/commons-compress-1\.4\.1[\.\-_].*jar
-lib/aopalliance-1\.0[\.\-_].*jar
-lib/junit-4\.11[\.\-_].*jar
-lib/jackson-core-asl-1\.9\.13[\.\-_].*jar
-lib/paranamer-2\.3[\.\-_].*jar
-lib/jersey-server-1\.9[\.\-_].*jar
-lib/log4j-1\.2\.17[\.\-_].*jar
-lib/hamcrest-core-1\.3[\.\-_].*jar
-lib/xz-1\.0[\.\-_].*jar
-lib/javax\.inject-1[\.\-_].*jar
-lib/protobuf-java-2\.5\.0[\.\-_].*jar
-lib/guice-3\.0[\.\-_].*jar
-lib/guice-servlet-3\.0[\.\-_].*jar
-hadoop-openstack-2\.7\.[0-9][\.\-_].*jar
-commons-httpclient-3\.1[\.\-_].*jar
-commons-collections-3\.2\.[12][\.\-_].*jar
-commons-logging-1\.1\.3[\.\-_].*jar
-commons-lang-2\.6[\.\-_].*jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-subprojs.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-subprojs.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-subprojs.list
deleted file mode 100644
index b0a5654..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-subprojs.list
+++ /dev/null
@@ -1,4 +0,0 @@
-hadoop-annotations\.jar
-hadoop-auth\.jar
-hadoop-common\.jar
-hadoop-nfs\.jar


[09/50] [abbrv] bigtop git commit: created base beelineUrl for better arguments add

Posted by rv...@apache.org.
created base beelineUrl for better arguments add

created base beelineUrl for better arguments add
(cherry picked from commit 4fae560516a9ca4575196ecf11e9ebccecde456f)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/55ef7684
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/55ef7684
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/55ef7684

Branch: refs/heads/master
Commit: 55ef7684e0a16c9a3e64cbbba81669c2eb82de2d
Parents: 46cd194
Author: roypradeep <ro...@us.ibm.com>
Authored: Tue Nov 1 18:48:26 2016 -0700
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:11 2017 -0700

----------------------------------------------------------------------
 .../odpi/specs/runtime/hive/TestBeeline.java    | 226 +++++--------------
 1 file changed, 60 insertions(+), 166 deletions(-)
----------------------------------------------------------------------
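
The patch below drops the testUrl string and the bothUserPass/onlyUser flags in favour of a single Commons Exec CommandLine that already carries -u/-n/-p; each test then copies that base and appends its own flags. A minimal sketch of the pattern, assuming commons-exec is on the classpath (the variable names are illustrative; the odpiHiveTest* property names come from the test itself):

import org.apache.commons.exec.CommandLine;

public class BeelineBaseCommandSketch {
  public static void main(String[] args) {
    String url = System.getProperty("odpiHiveTestJdbcUrl");
    String user = System.getProperty("odpiHiveTestJdbcUser");
    String passwd = System.getProperty("odpiHiveTestJdbcPassword");

    // Built once: beeline -u <url> [-n <user> [-p <passwd>]]
    CommandLine base = new CommandLine("beeline").addArgument("-u").addArgument(url);
    if (user != null && !user.isEmpty()) {
      base.addArgument("-n").addArgument(user);
      if (passwd != null && !passwd.isEmpty()) {
        base.addArgument("-p").addArgument(passwd);
      }
    }

    // Each test copies the base so per-test flags never leak into the shared command.
    CommandLine showDbs = new CommandLine(base).addArgument("-e").addArgument("SHOW DATABASES;");
    System.out.println(showDbs);  // prints the assembled argument list
  }
}

The CommandLine copy constructor is what lets checkBeelineQueryExecFromCmdLine and the other tests in the diff share one base command without mutating it.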


http://git-wip-us.apache.org/repos/asf/bigtop/blob/55ef7684/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
index 37c71cf..682949d 100644
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
+++ b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
@@ -1,5 +1,4 @@
 package org.odpi.specs.runtime.hive;
-
 import org.apache.commons.exec.CommandLine;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -11,7 +10,6 @@ import java.io.FileNotFoundException;
 import java.io.PrintWriter;
 import java.util.Map;
 
-
 /**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -31,45 +29,36 @@ import java.util.Map;
  */
 
 public class TestBeeline {
-
+	
 	public static final Log LOG = LogFactory.getLog(TestBeeline.class.getName());
-
 	private static final String URL = "odpiHiveTestJdbcUrl";
 	private static final String USER = "odpiHiveTestJdbcUser";
 	private static final String PASSWD = "odpiHiveTestJdbcPassword";
-
 	private static Map<String, String> results;
-
 	private static String beelineUrl; 
 	private static String beelineUser;
 	private static String beelinePasswd;
-	private static String testUrl;
-
-	//flags to check if username and password should be added as argument in some tests
-	private static boolean bothUserPass = false;
-	private static boolean onlyUser = false;
+	
+	//creating beeline base command with username and password as per inputs
+	private static CommandLine beelineBaseCommand = new CommandLine("beeline");
 
 	@BeforeClass
 	public static void initialSetup(){
-
 		TestBeeline.beelineUrl = System.getProperty(URL);
 		TestBeeline.beelineUser = System.getProperty(USER);
 		TestBeeline.beelinePasswd =System.getProperty(PASSWD);
-		TestBeeline.testUrl = System.getProperty(URL);
 
-		// Create Url with username and/or passowrd to handle all ways to connect to beeline
 		if (beelineUser != null && beelineUser != "" && beelinePasswd != null && beelinePasswd != "") 
 		{ 
-			testUrl = beelineUrl+" -n "+beelineUser+" -p "+beelinePasswd; 
-			bothUserPass=true;
+			beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd);
 		}
 		else if (beelineUser != null && beelineUser != "") 
 		{ 
-			testUrl = beelineUrl+" -n "+beelineUser; 
-			onlyUser=true;
+			beelineBaseCommand.addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser);
+		}
+		else {
+			beelineBaseCommand.addArgument("-u").addArgument(beelineUrl);
 		}
-		System.out.println("Setting url"+testUrl); 
-
 		LOG.info("URL is " + beelineUrl); 
 		LOG.info("User is " + beelineUser);
 		LOG.info("Passwd is " + beelinePasswd); 
@@ -78,124 +67,67 @@ public class TestBeeline {
 
 	@Test
 	public void checkBeeline() {
-
-		System.out.println(beelineUrl);  
-
-		results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(testUrl));
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand));
 		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline -u FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-
-
+		Assert.assertEquals("beeline -u FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
 	}
-
+	
 	@Test
 	public void checkBeelineConnect(){
-		try(PrintWriter out = new PrintWriter("connect.url")){ out.println("!connect " + beelineUrl+" "+beelineUser+" "+beelinePasswd+";"); out.println("!quit;"); } 
+		try(PrintWriter out = new PrintWriter("connect.url")){ out.println("!connect " + beelineUrl+" "+beelineUser+" "+beelinePasswd); out.println("!quit"); } 
 		catch (FileNotFoundException e1) {
-			
 			e1.printStackTrace();
 		}
 		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -f connect.url",false));
 		String consoleMsg = results.get("outputStream").toLowerCase();
-
-
-		Assert.assertEquals("beeline !connect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl) && !consoleMsg.contains("error") && !consoleMsg.contains("exception") );  
+		Assert.assertEquals("beeline !connect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl.toLowerCase()) && !consoleMsg.contains("error") && !consoleMsg.contains("exception") );  
 	}
-
+	
 	@Test
 	public void checkBeelineHelp(){
 		results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("--help"));
 		String consoleMsg = results.get("outputStream").toLowerCase();
 		Assert.assertEquals("beeline --help FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("display this message" ) && consoleMsg.contains("usage: java org.apache.hive.cli.beeline.beeline") && !consoleMsg.contains("exception"));
-
 	}
-
+	
 	@Test
 	public void checkBeelineQueryExecFromCmdLine(){
-
-		if (bothUserPass) 
-		{ 
-			results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd).addArgument("-e").addArgument("SHOW DATABASES;"));
-
-			if(!results.get("outputStream").contains("odpi_runtime_hive")){
-
-				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
-				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd).addArgument("-e").addArgument("SHOW DATABASES;"));
-			}else{
-
-				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive;"));
-				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
-				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd).addArgument("-e").addArgument("SHOW DATABASES;"));
-
-			}
-			String consoleMsg = results.get("outputStream").toLowerCase();
-			Assert.assertEquals("beeline -e FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-
-			HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
-		}
-		else if (onlyUser) 
-		{ 
-			results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-e").addArgument("SHOW DATABASES;"));
-
-			if(!results.get("outputStream").contains("odpi_runtime_hive")){
-
-				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
-				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-e").addArgument("SHOW DATABASES;"));
-			}else{
-
-				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive;"));
-				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
-				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-e").addArgument("SHOW DATABASES;"));
-
-			}
-			String consoleMsg = results.get("outputStream").toLowerCase();
-			Assert.assertEquals("beeline -e FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-
-			HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
-		}
-		else {
-			results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("SHOW DATABASES;"));
-
-			if(!results.get("outputStream").contains("odpi_runtime_hive")){
-				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
-
-			}else{
-				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive;"));
-				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
-				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("SHOW DATABASES;"));
-			
-
-			}
-			String consoleMsg = results.get("outputStream").toLowerCase();
-			Assert.assertEquals("beeline -e FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-
-			HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
+		}else{
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive;"));
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("SHOW DATABASES;"));
 		}
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline -e FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
 	}
-
+	
 	@Test
 	public void checkBeelineQueryExecFromFile() throws FileNotFoundException{
-
+		
 		try(PrintWriter out = new PrintWriter("beeline-f1.sql")){ out.println("SHOW DATABASES;"); }
 		try(PrintWriter out = new PrintWriter("beeline-f2.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
 		try(PrintWriter out = new PrintWriter("beeline-f3.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
 		try(PrintWriter out = new PrintWriter("beeline-f4.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); }
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" -f beeline-f1.sql",false));
-
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false));
+		
 		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" -f beeline-f2.sql",false));
-
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f2.sql",false));
 		}else{
-			results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" -f beeline-f3.sql",false));
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f3.sql",false));
 		}
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" -f beeline-f1.sql",false));
+		
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f1.sql",false));
 
 		String consoleMsg = results.get("outputStream").toLowerCase();
 		Assert.assertEquals("beeline -f FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-
-		HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" -f beeline-f4.sql",false));		    
+		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-f").addArgument("beeline-f4.sql",false));	
 	}
-
+	
 	@Test
 	public void checkBeelineInitFile() throws FileNotFoundException{
 
@@ -203,22 +135,20 @@ public class TestBeeline {
 		try(PrintWriter out = new PrintWriter("beeline-i2.sql")){ out.println("CREATE DATABASE odpi_runtime_beeline_init;"); }
 		try(PrintWriter out = new PrintWriter("beeline-i3.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); out.println("CREATE DATABASE odpi_runtime_beeline_init;"); }
 		try(PrintWriter out = new PrintWriter("beeline-i4.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); }
-
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" -i beeline-i1.sql",false));
-
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false));
+	
 		if(!results.get("outputStream").contains("odpi_runtime_beeline_init")){
-			results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" -i beeline-i2.sql",false));
-
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i2.sql",false));
 		}else{
-			results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" -i beeline-i3.sql",false));
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i3.sql",false));
 		}
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" -i beeline-i1.sql",false));
+		
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i1.sql",false));
 		String consoleMsg = results.get("outputStream").toLowerCase();
 		Assert.assertEquals("beeline -i FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_beeline_init") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-
-		HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" -i beeline-i4.sql",false));		    
+		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("-i").addArgument("beeline-i4.sql",false));	
 	}
-
+	
 	@Test
 	public void checkBeelineHiveVar() throws FileNotFoundException{
 
@@ -226,80 +156,44 @@ public class TestBeeline {
 		try(PrintWriter out = new PrintWriter("beeline-hv2.sql")){ out.println("CREATE DATABASE ${db};"); }
 		try(PrintWriter out = new PrintWriter("beeline-hv3.sql")){ out.println("DROP DATABASE ${db};"); out.println("CREATE DATABASE ${db};"); }
 		try(PrintWriter out = new PrintWriter("beeline-hv4.sql")){ out.println("DROP DATABASE ${db};"); }
-
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" --hivevar db=odpi_runtime_beeline_hivevar -i beeline-hv1.sql",false));
-		String consoleMsg = results.get("outputStream");
-
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false));
+	
 		if(!results.get("outputStream").contains("odpi_runtime_beeline_hivevar")){
-			results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" --hivevar db=odpi_runtime_beeline_hivevar -i beeline-hv2.sql",false));
-
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv2.sql",false));
 		}else{
-			results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" --hivevar db=odpi_runtime_beeline_hivevar -i beeline-hv3.sql",false));
+			results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv3.sql",false));
 		}
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" --hivevar db=odpi_runtime_beeline_hivevar -i beeline-hv1.sql",false));
-		consoleMsg = results.get("outputStream").toLowerCase();
-
+		
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv1.sql",false));
+		String consoleMsg = results.get("outputStream").toLowerCase();
 		Assert.assertEquals("beeline --hivevar FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_beeline_hivevar") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-
-		HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" --hivevar db=odpi_runtime_beeline_hivevar -i beeline-hv4.sql",false));		    
+		HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--hivevar").addArgument("db=odpi_runtime_beeline_hivevar").addArgument("-i").addArgument("beeline-hv4.sql",false));		 
 	}
-
+	
 	@Test
 	public void checkBeelineFastConnect(){
-		results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(testUrl).addArgument("--fastConnect=false"));
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--fastConnect=false"));
 		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline --fastConnect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("set fastconnect to true to skip")); 
+		Assert.assertEquals("beeline --fastConnect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("set fastconnect to true to skip"));
 	}
 
 	@Test
 	public void checkBeelineVerbose(){
-
-		//explicit check for username password again as url containing -u -p is not working in single addArgument function with testUrl
-
-		if (bothUserPass) 
-		{ 
-			results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd).addArgument("--verbose=true"));
-		}
-		else if (onlyUser) 
-		{ 
-			results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("--verbose=true"));
-		}
-		else {
-			results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("--verbose=true"));
-		}
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--verbose=true"));
 		String consoleMsg = results.get("outputStream").toLowerCase();
-		Assert.assertEquals("beeline --verbose FAILED using url "+testUrl+". \n" +results.get("outputStream"), true, consoleMsg.contains("issuing: !connect jdbc:hive2:") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+		Assert.assertEquals("beeline --verbose FAILED." +results.get("outputStream"), true, consoleMsg.contains("issuing: !connect jdbc:hive2:") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
 	}
-
+	
 	@Test
 	public void checkBeelineShowHeader(){
-		
-		//explicit check for username password again as url containing -u -p is not working in single addArgument function with testUrl
-
-		if (bothUserPass) 
-		{ 
-			results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd).addArgument("--showHeader=false").addArgument("-e").addArgument("SHOW DATABASES;"));
-		}
-		else if (onlyUser) 
-		{ 
-			results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("--showHeader=false").addArgument("-e").addArgument("SHOW DATABASES;"));
-		}
-		else {
-			results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("--showHeader=false").addArgument("-e").addArgument("SHOW DATABASES;"));
-		}
+		results = HiveHelper.execCommand(new CommandLine(beelineBaseCommand).addArgument("--showHeader=false").addArgument("-e").addArgument("SHOW DATABASES;"));
 		String consoleMsg = results.get("outputStream").toLowerCase();
 		Assert.assertEquals("beeline --showHeader FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("default")&&!consoleMsg.contains("database_name") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-
 	}
 
 	@AfterClass
 	public static void cleanup() throws FileNotFoundException {
-
 		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf beeline*.sql", false));
 		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf connect.url", false));
-
 	}
-
-
-
 }


[44/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-2.7.3-api-report.json
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-2.7.3-api-report.json b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-2.7.3-api-report.json
new file mode 100644
index 0000000..6a6c7af
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-2.7.3-api-report.json
@@ -0,0 +1 @@
+{"name":"hadoop-common","version":"2.7.3","classes":{"org.apache.hadoop.record.RecordInput":{"name":"org.apache.hadoop.record.RecordInput","methods":{"long readLong(java.lang.String) throws java.io.IOException":{"name":"readLong","returnType":"long","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.lang.String) throws java.io.IOException":{"name":"endMap","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Buffer readBuffer(java.lang.String) throws java.io.IOException":{"name":"readBuffer","returnType":"org.apache.hadoop.record.Buffer","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"byte readByte(java.lang.String) throws java.io.IOException":{"name":"readByte","returnType":"byte","args":["java.lang.String"]
 ,"exceptions":["java.io.IOException"]},"float readFloat(java.lang.String) throws java.io.IOException":{"name":"readFloat","returnType":"float","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void startRecord(java.lang.String) throws java.io.IOException":{"name":"startRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"double readDouble(java.lang.String) throws java.io.IOException":{"name":"readDouble","returnType":"double","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.String readString(java.lang.String) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"int readInt(java.lang.String) throws java.io.IOException":{"name":"readInt","re
 turnType":"int","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"boolean readBool(java.lang.String) throws java.io.IOException":{"name":"readBool","returnType":"boolean","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startVector(java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startMap(java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.NullWritable":{"name":"org.apache.hadoop.io.NullWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.NullWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.NullWr
 itable"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.io.NullWritable get()":{"name":"get","returnType":"org.apache.hadoop.io.NullWritable","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.XmlRecordInput":{"name":"org.apache.hadoop.record.XmlRecordInput","methods":{"long readLong(java.lang.String) throws java.io.IOEx
 ception":{"name":"readLong","returnType":"long","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.lang.String) throws java.io.IOException":{"name":"endMap","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Buffer readBuffer(java.lang.String) throws java.io.IOException":{"name":"readBuffer","returnType":"org.apache.hadoop.record.Buffer","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"byte readByte(java.lang.String) throws java.io.IOException":{"name":"readByte","returnType":"byte","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"float readFloat(java.lang.String) throws java.io.IOException":{"name":"readFloat","returnType":"float","args":["java.lang.String"],"exceptions":["java.io.IOExcept
 ion"]},"void startRecord(java.lang.String) throws java.io.IOException":{"name":"startRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"double readDouble(java.lang.String) throws java.io.IOException":{"name":"readDouble","returnType":"double","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.String readString(java.lang.String) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"int readInt(java.lang.String) throws java.io.IOException":{"name":"readInt","returnType":"int","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"boolean readBool(java.lang.String) throws java.io.IOException":{"name":"readBool","returnType":"boolean","args":["java.
 lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startVector(java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startMap(java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.FileSystem":{"name":"org.apache.hadoop.fs.FileSystem","methods":{"org.apache.hadoop.security.token.Token getDelegationToken(java.lang.String) throws java.io.IOException":{"name":"getDelegationToken","returnType":"org.apache.hadoop.security.token.Token","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem get(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"get","returnType":"org.apache.hadoop.fs.FileSyst
 em","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"short getDefaultReplication(org.apache.hadoop.fs.Path)":{"name":"getDefaultReplication","returnType":"short","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"[B getXAttr(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"getXAttr","returnType":"[B","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"boolean setReplication(org.apache.hadoop.fs.Path, short) throws java.io.IOException":{"name":"setReplication","returnType":"boolean","args":["org.apache.hadoop.fs.Path","short"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.ContentSummary getContentSummary(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getContentSummary","returnType":"org.apache.hadoop.fs.ContentSummary","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream crea
 te(org.apache.hadoop.fs.Path, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; globStatus(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.PathFilter) throws java.io.IOException":{"name":"globStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.PathFilter"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileSystem; getChildFileSystems()":{"name":"getChildFileSystems","returnType":"[Lorg.apache.hadoop.fs.FileSystem;","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.ut
 il.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"java.lang.Class getFileSystemClass(java.lang.String, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"getFileSystemClass","returnType":"java.lang.Class","args":["java.lang.String","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem newInstance(java.net.URI, org.apache
 .hadoop.conf.Configuration, java.lang.String) throws java.lang.InterruptedException, java.io.IOException":{"name":"newInstance","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.net.URI","org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.util.Map getStatistics()":{"name":"getStatistics","returnType":"java.util.Map","args":[],"exceptions":[]},"org.apache.hadoop.fs.RemoteIterator listStatusIterator(org.apache.hadoop.fs.Path) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listStatusIterator","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FileSystem$Statistics getStatistics(java.lang.String, java.lang.Class)":{"name":"getStatistics","returnType":"org.apache.hadoop.fs.FileSystem$Statistics","args":["java.lang.String","java.lang.Class"],"excep
 tions":[]},"boolean isFile(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"isFile","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void renameSnapshot(org.apache.hadoop.fs.Path, java.lang.String, java.lang.String) throws java.io.IOException":{"name":"renameSnapshot","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsStatus getStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getStatus","return
 Type":"org.apache.hadoop.fs.FsStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path createSnapshot(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"createSnapshot","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listFiles(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listFiles","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"void copyToLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IO
 Exception":{"name":"copyToLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean areSymlinksEnabled()":{"name":"areSymlinksEnabled","returnType":"boolean","args":[],"exceptions":[]},"boolean createNewFile(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"createNewFile","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void removeAcl(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"removeAcl","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) th
 rows java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.BlockLocation; getFileBlockLocations(org.apache.hadoop.fs.Path, long, long) throws java.io.IOException":{"name":"getFileBlockLocations","returnType":"[Lorg.apache.hadoop.fs.BlockLocation;","args":["org.apache.hadoop.fs.Path","long","long"],"exceptions":["java.io.IOException"]},"boolean deleteOnExit(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"deleteOnExit","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream",
 "args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.LocalFileSystem getLocal(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"getLocal","returnType":"org.apache.hadoop.fs.LocalFileSystem","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void setDefaultUri(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"setDefaultUri","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"org.apache.hadoop.fs.permission.AclStatus getAclStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getAclStatus","returnType":"org.apache.hadoop.fs.permission.AclStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.FileSystem, org.apache.hadoo
 p.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"void printStatistics() throws java.io.IOException":{"name":"printStatistics","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void setOwner(org.apache.hadoop.fs.Path, java.lang.String, java.lang.String) throws java.io.IOException":{"name":"setOwner","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsServerDefaults getServerDefaults(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getServerDefaults","returnType":"org.apache.hadoop.fs.FsServerDefaults","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void moveFromLocalFile(org.apache.hadoop.fs
 .Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveFromLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"void deleteSnapshot(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"deleteSnapshot","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"long getDefaultBlockSize()":{"name":"getDefaultBlockSize","returnType":"long","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataInputSt
 ream open(org.apache.hadoop.fs.Path, int) throws java.io.IOException":{"name":"open","returnType":"org.apache.hadoop.fs.FSDataInputStream","args":["org.apache.hadoop.fs.Path","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsStatus getStatus() throws java.io.IOException":{"name":"getStatus","returnType":"org.apache.hadoop.fs.FsStatus","args":[],"exceptions":["java.io.IOException"]},"java.net.URI getUri()":{"name":"getUri","returnType":"java.net.URI","args":[],"exceptions":[]},"[Lorg.apache.hadoop.security.token.Token; addDelegationTokens(java.lang.String, org.apache.hadoop.security.Credentials) throws java.io.IOException":{"name":"addDelegationTokens","returnType":"[Lorg.apache.hadoop.security.token.Token;","args":["java.lang.String","org.apache.hadoop.security.Credentials"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getHomeDirectory()":{"name":"getHomeDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void comple
 teLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"completeLocalOutput","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean rename(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"rename","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileStatus getFileStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void enableSymlinks()":{"name":"enableSymlinks","returnType":"void","args":[],"exceptions":[]},"void moveToLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveToLocalFile","returnType":"void","args":["org.
 apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listCorruptFileBlocks(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listCorruptFileBlocks","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setAcl(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"setAcl","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path createSnapshot(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"createSnapshot","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileChecksum getFileChecksum(org.apache.hadoop.fs.Path, long) throws java.io.IOException":{"name":"getFileChecksum","returnType":"org.apache.hadoop.fs.FileChecksu
 m","args":["org.apache.hadoop.fs.Path","long"],"exceptions":["java.io.IOException"]},"void setPermission(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"setPermission","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path startLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"startLocalOutput","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setWriteChecksum(boolean)":{"name":"setWriteChecksum","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.String getScheme()":{"name":"getScheme","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean delete(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"delete","returnType":"
 boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean isDirectory(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"isDirectory","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void copyToLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"copyToLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void copyFromLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"short getDefaultReplication()":{"name":"getDefaultReplication","returnType":"short","args":[],"exceptions":[]},"void setXAttr(org.apache.hadoop.fs.Path, java.lang.Strin
 g, [B) throws java.io.IOException":{"name":"setXAttr","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","[B"],"exceptions":["java.io.IOException"]},"boolean cancelDeleteOnExit(org.apache.hadoop.fs.Path)":{"name":"cancelDeleteOnExit","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void copyToLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyToLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.BlockLocation; getFileBlockLocations(org.apache.hadoop.fs.FileStatus, long, long) throws java.io.IOException":{"name":"getFileBlockLocations","returnType":"[Lorg.apache.hadoop.fs.BlockLocation;","args":["org.apache.hadoop.fs.FileStatus","long","long"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus(org.apache.hadoop.fs.Path, or
 g.apache.hadoop.fs.PathFilter) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.PathFilter"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"void removeDefaultAcl(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"removeDefaultAcl","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem getNamed(java.lang.String, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"getNamed","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.lang.String","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus([Lorg.apache.hadoop.fs.Path;, org.apache.hadoop.fs.PathFilter) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listStatus","returnT
 ype":"[Lorg.apache.hadoop.fs.FileStatus;","args":["[Lorg.apache.hadoop.fs.Path;","org.apache.hadoop.fs.PathFilter"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"long getDefaultBlockSize(org.apache.hadoop.fs.Path)":{"name":"getDefaultBlockSize","returnType":"long","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void concat(org.apache.hadoop.fs.Path, [Lorg.apache.hadoop.fs.Path;) throws java.io.IOException":{"name":"concat","returnType":"void","args":["org.apache.hadoop.fs.Path","[Lorg.apache.hadoop.fs.Path;"],"exceptions":["java.io.IOException"]},"void initialize(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"initialize","returnType":"void","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean, int) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDa
 taOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, short, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","short","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path makeQualified(org.apache.hadoop.fs.Path)":{"name":"makeQualified","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, short) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","short"],"exceptions":["java.io.IOException"]},"void setTimes(org.apache.hadoop.fs.Path, long, long) throws java.io.IOException":{"name":"setTim
 es","returnType":"void","args":["org.apache.hadoop.fs.Path","long","long"],"exceptions":["java.io.IOException"]},"long getUsed() throws java.io.IOException":{"name":"getUsed","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"void moveFromLocalFile([Lorg.apache.hadoop.fs.Path;, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveFromLocalFile","returnType":"void","args":["[Lorg.apache.hadoop.fs.Path;","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.net.URI getDefaultUri(org.apache.hadoop.conf.Configuration)":{"name":"getDefaultUri","returnType":"java.net.URI","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean, int, short, long) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","short","long"],"exceptions":["java.io.IOException"]}
 ,"java.util.Map getXAttrs(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"getXAttrs","returnType":"java.util.Map","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"void setVerifyChecksum(boolean)":{"name":"setVerifyChecksum","returnType":"void","args":["boolean"],"exceptions":[]},"org.apache.hadoop.fs.FileStatus getFileLinkStatus(org.apache.hadoop.fs.Path) throws org.apache.hadoop.security.AccessControlException, org.apache.hadoop.fs.UnsupportedFileSystemException, java.io.IOException, java.io.FileNotFoundException":{"name":"getFileLinkStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["org.apache.hadoop.security.AccessControlException","org.apache.hadoop.fs.UnsupportedFileSystemException","java.io.IOException","java.io.FileNotFoundException"]},"void copyFromLocalFile(boolean, boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.I
 OException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; globStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"globStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getLinkTarget(org.a
 pache.hadoop.fs.Path) throws java.io.IOException":{"name":"getLinkTarget","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem get(java.net.URI, org.apache.hadoop.conf.Configuration, java.lang.String) throws java.lang.InterruptedException, java.io.IOException":{"name":"get","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.net.URI","org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void createSymlink(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path, boolean) throws org.apache.hadoop.fs.FileAlreadyExistsException, org.apache.hadoop.security.AccessControlException, org.apache.hadoop.fs.ParentNotDirectoryException, org.apache.hadoop.fs.UnsupportedFileSystemException, java.io.IOException, java.io.FileNotFoundException":{"name":"createSymlink","returnType":"void","args":["org.apache.hadoop.fs.Path","
 org.apache.hadoop.fs.Path","boolean"],"exceptions":["org.apache.hadoop.fs.FileAlreadyExistsException","org.apache.hadoop.security.AccessControlException","org.apache.hadoop.fs.ParentNotDirectoryException","org.apache.hadoop.fs.UnsupportedFileSystemException","java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path, int) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","int"],"exceptions":["java.io.IOException"]},"boolean delete(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"delete","returnType":"boolean","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void copyFromLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.ap
 ache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.util.List getAllStatistics()":{"name":"getAllStatistics","returnType":"java.util.List","args":[],"exceptions":[]},"void access(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsAction) throws org.apache.hadoop.security.AccessControlException, java.io.IOException, java.io.FileNotFoundException":{"name":"access","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsAction"],"exceptions":["org.apache.hadoop.security.AccessControlException","java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void removeAclEntries(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"removeAclEntri
 es","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void closeAllForUGI(org.apache.hadoop.security.UserGroupInformation) throws java.io.IOException":{"name":"closeAllForUGI","returnType":"void","args":["org.apache.hadoop.security.UserGroupInformation"],"exceptions":["java.io.IOException"]},"void setDefaultUri(org.apache.hadoop.conf.Configuration, java.net.URI)":{"name":"setDefaultUri","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.net.URI"],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean, int, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apach
 e.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path, int, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","int","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"void clearStatistics()":{"name":"clearStatistics","returnType":"void","args":[],"exceptions":[]},"org.apache.hadoop.fs.FileChecksum getFileChecksum(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileChecksum","returnType":"org.apache.hadoop.fs.FileChecksum","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void removeXAttr(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"removeXAttr","returnType":"void","args":["org.
 apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem newInstance(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"newInstance","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getWorkingDirectory()":{"name":"getWorkingDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"org.apache.hadoop.fs.FileSystem get(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"get","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"java.util.List listXAttrs(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listXAttrs","returnType":"java.util.List","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.I
 OException"]},"org.apache.hadoop.fs.FileSystem newInstance(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"newInstance","returnType":"org.apache.hadoop.fs.FileSystem","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void setWorkingDirectory(org.apache.hadoop.fs.Path)":{"name":"setWorkingDirectory","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void copyFromLocalFile(boolean, boolean, [Lorg.apache.hadoop.fs.Path;, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","boolean","[Lorg.apache.hadoop.fs.Path;","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path resolvePath(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"resolvePath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.util.Map getXAttrs(o
 rg.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getXAttrs","returnType":"java.util.Map","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"long getLength(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getLength","returnType":"long","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.lang.String getCanonicalServiceName()":{"name":"getCanonicalServiceName","returnType":"java.lang.String","args":[],"exceptions":[]},"long getBlockSize(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getBlockSize","returnType":"long","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"short getReplication(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getReplication","returnType":"short","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void modifyAclEntries(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"mo
 difyAclEntries","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listLocatedStatus(org.apache.hadoop.fs.Path) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listLocatedStatus","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FSDataInputStream open(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"open","returnType":"org.apache.hadoop.fs.FSDataInputStream","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus(org.apache.hadoop.fs.Path) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException","java.io.FileNotFound
 Exception"]},"org.apache.hadoop.fs.LocalFileSystem newInstanceLocal(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"newInstanceLocal","returnType":"org.apache.hadoop.fs.LocalFileSystem","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"boolean exists(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"exists","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setXAttr(org.apache.hadoop.fs.Path, java.lang.String, [B, java.util.EnumSet) throws java.io.IOException":{"name":"setXAttr","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","[B","java.util.EnumSet"],"exceptions":["java.io.IOException"]},"boolean supportsSymlinks()":{"name":"supportsSymlinks","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataOut
 putStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.util.Progressable, org.apache.hadoop.fs.Options$ChecksumOpt) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable","org.apache.hadoop.fs.Options$ChecksumOpt"],"exceptions":["java.io.IOException"]},"boolean truncate(org.apache.hadoop.fs.Path, long) throws java.io.IOException":{"name":"truncate","returnType":"boolean","args":["org.apache.hadoop.fs.Path","long"],"exceptions":["java.io.IOException"]},"void closeAll() throws java.io.IOException":{"name":"closeAll","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus([Lorg.apache.hadoop.fs.Path;) throws java.io.IOException, java.io
 .FileNotFoundException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["[Lorg.apache.hadoop.fs.Path;"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsServerDefaults getServerDefaults() throws java.io.IOException":{"name":"getServerDefaults","returnType":"org.apache.hadoop.fs.FsServerDefaults","args":[],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions"
 :["java.io.IOException"]}}},"org.apache.hadoop.fs.BlockLocation":{"name":"org.apache.hadoop.fs.BlockLocation","methods":{"[Ljava.lang.String; getCachedHosts()":{"name":"getCachedHosts","returnType":"[Ljava.lang.String;","args":[],"exceptions":[]},"void setTopologyPaths([Ljava.lang.String;) throws java.io.IOException":{"name":"setTopologyPaths","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void setHosts([Ljava.lang.String;) throws java.io.IOException":{"name":"setHosts","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void setCorrupt(boolean)":{"name":"setCorrupt","returnType":"void","args":["boolean"],"exceptions":[]},"[Ljava.lang.String; getNames() throws java.io.IOException":{"name":"getNames","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; getTopologyPaths() throws java.io.IOException":{"name":"getTopologyPaths","returnType":"[Ljava.lang.Str
 ing;","args":[],"exceptions":["java.io.IOException"]},"long getLength()":{"name":"getLength","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"long getOffset()":{"name":"getOffset","returnType":"long","args":[],"exceptions":[]},"void setOffset(long)":{"name":"setOffset","returnType":"void","args":["long"],"exceptions":[]},"void setNames([Ljava.lang.String;) throws java.io.IOException":{"name":"setNames","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void setLength(long)":{"name":"setLength","returnType":"void","args":["long"],"exceptions":[]},"[Ljava.lang.String; getHosts() throws java.io.IOException":{"name":"getHosts","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"boolean isCorrupt()":{"name":"isCorrupt","returnType":"boolean","args":[],"exceptions":[]},"void setCachedHosts([Ljava.lang.String;)":{"
 name":"setCachedHosts","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":[]}}},"org.apache.hadoop.io.Text":{"name":"org.apache.hadoop.io.Text","methods":{"java.lang.String readString(java.io.DataInput, int) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.io.DataInput","int"],"exceptions":["java.io.IOException"]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void set(org.apache.hadoop.io.Text)":{"name":"set","returnType":"void","args":["org.apache.hadoop.io.Text"],"exceptions":[]},"void validateUTF8([B, int, int) throws java.nio.charset.MalformedInputException":{"name":"validateUTF8","returnType":"void","args":["[B","int","int"],"exceptions":["java.nio.charset.MalformedInputException"]},"int getLength()":{"name":"getLength","returnType":"int","args":[],"exceptions":[]},"void readFields(java.io.DataInput, int) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["j
 ava.io.DataInput","int"],"exceptions":["java.io.IOException"]},"void set([B, int, int)":{"name":"set","returnType":"void","args":["[B","int","int"],"exceptions":[]},"int bytesToCodePoint(java.nio.ByteBuffer)":{"name":"bytesToCodePoint","returnType":"int","args":["java.nio.ByteBuffer"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void append([B, int, int)":{"name":"append","returnType":"void","args":["[B","int","int"],"exceptions":[]},"java.lang.String decode([B, int, int, boolean) throws java.nio.charset.CharacterCodingException":{"name":"decode","returnType":"java.lang.String","args":["[B","int","int","boolean"],"exceptions":["java.nio.charset.CharacterCodingException"]},"java.nio.ByteBuffer encode(java.lang.String, boolean) throws java.nio.charset.CharacterCodingException":{"name":"encode","returnType":"java.nio.ByteBuffer","args":["java.lang.String","boolean"],"exceptions":["java.nio.char
 set.CharacterCodingException"]},"int writeString(java.io.DataOutput, java.lang.String, int) throws java.io.IOException":{"name":"writeString","returnType":"int","args":["java.io.DataOutput","java.lang.String","int"],"exceptions":["java.io.IOException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.lang.String decode([B, int, int) throws java.nio.charset.CharacterCodingException":{"name":"decode","returnType":"java.lang.String","args":["[B","int","int"],"exceptions":["java.nio.charset.CharacterCodingException"]},"java.nio.ByteBuffer encode(java.lang.String) throws java.nio.charset.CharacterCodingException":{"name":"encode","returnType":"java.nio.ByteBuffer","args":["java.lang.String"],"exceptions":["java.nio.charset.CharacterCodingException"]},"int writeS
 tring(java.io.DataOutput, java.lang.String) throws java.io.IOException":{"name":"writeString","returnType":"int","args":["java.io.DataOutput","java.lang.String"],"exceptions":["java.io.IOException"]},"[B getBytes()":{"name":"getBytes","returnType":"[B","args":[],"exceptions":[]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]},"void write(java.io.DataOutput, int) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput","int"],"exceptions":["java.io.IOException"]},"void set(java.lang.String)":{"name":"set","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int utf8Length(java.lang.String)":{"name":"utf8Length","returnType":"int","args":["java.lang.String"],"exceptions":[]},"void readWithKnownLength(java.io.DataInput, int) throws java.io.IOException":{"name":"readWithKnownLength","returnType":"void","args":["java.io.DataInput","int"],"exceptions":["java.io.IOException"]},"java.lang.String readString(java.i
 o.DataInput) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"java.lang.String decode([B) throws java.nio.charset.CharacterCodingException":{"name":"decode","returnType":"java.lang.String","args":["[B"],"exceptions":["java.nio.charset.CharacterCodingException"]},"void skip(java.io.DataInput) throws java.io.IOException":{"name":"skip","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"int charAt(int)":{"name":"charAt","returnType":"int","args":["int"],"exceptions":[]},"int find(java.lang.String, int)":{"name":"find","returnType":"int","args":["java.lang.String","int"],"exceptions":[]},"void set([B)":{"name":"set","returnType":"void","args":["[B"],"exceptions":[]},"int find(java.lang.String)":{"name":"find","returnType":"int","args":["java.lang.String"],"exceptions":[]},"[B copyBytes()":{"name":"copyBytes","returnType":"[B","args":[],"exceptions":[]}
 ,"void validateUTF8([B) throws java.nio.charset.MalformedInputException":{"name":"validateUTF8","returnType":"void","args":["[B"],"exceptions":["java.nio.charset.MalformedInputException"]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.Writable":{"name":"org.apache.hadoop.io.Writable","methods":{"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.VLongWritable":{"name":"org.apache.hadoop.io.VLongWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void set(long)":{"name":"set","return
 Type":"void","args":["long"],"exceptions":[]},"long get()":{"name":"get","returnType":"long","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.VLongWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.VLongWritable"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.VersionedWritable
 ":{"name":"org.apache.hadoop.io.VersionedWritable","methods":{"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"byte getVersion()":{"name":"getVersion","returnType":"byte","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.SequenceFile":{"name":"org.apache.hadoop.io.SequenceFile","methods":{"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configu
 ration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileContext, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.io.SequenceFile$Metadata, java.util.EnumSet, [Lorg.apache.hadoop.fs.Options$CreateOpts;) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileContext","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.io.SequenceFile$Metadata","java.util.EnumSet","[Lorg.apache.hadoop.
 fs.Options$CreateOpts;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.conf.Configuration, [Lorg.apache.hadoop.io.SequenceFile$Writer$Option;) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.conf.Configuration","[Lorg.apache.hadoop.io.SequenceFile$Writer$Option;"],"exceptions":["java.io.IOException"]},"void setDefaultCompressionType(org.apache.hadoop.conf.Configuration, org.apache.hadoop.io.SequenceFile$CompressionType)":{"name":"setDefaultCompressionType","returnType":"void","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.io.SequenceFile$CompressionType"],"exceptions":[]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, int, short, long, org.apache.hadoop.io.SequenceFile$Compressi
 onType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.util.Progressable, org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","int","short","long","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.util.Progressable","org.apache.hadoop.io.SequenceFile$Metadata"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FSDataOutputStream, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException":{"name":"createWriter","r
 eturnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.FSDataOutputStream","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.io.SequenceFile$Metadata"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FSDataOutputStream, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.FSDataOutputStream","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec"],"exceptions":["java.io.IOExcepti
 on"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, int, short, long, boolean, org.apache.hadoop.io.Seq
 uenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","int","short","long","boolean","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.io.SequenceFile$Metadata"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.l
 ang.Class","java.lang.Class"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.Compr
 essionCodec, org.apache.hadoop.util.Progressable, org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.util.Progressable","org.apache.hadoop.io.SequenceFile$Metadata"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$CompressionType getDefaultCompressionType(org.apache.hadoop.conf.Configuration)":{"name":"getDefaultCompressionType","returnType":"org.apache.hadoop.io.SequenceFile$CompressionType","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs
 .Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.file.tfile.MetaBlockAlreadyExists":{"name":"org.apache.hadoop.io.file.tfile.MetaBlockAlreadyExists","methods":{}},"org.apache.hadoop.fs.FileStatus":{"name":"org.apache.hadoop.fs.FileStatus","methods":{"org.apache.hadoop.fs.permission.FsPermission getPermission()":{"name":"getPermission","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":[],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"
 exceptions":[]},"boolean isFile()":{"name":"isFile","returnType":"boolean","args":[],"exceptions":[]},"long getBlockSize()":{"name":"getBlockSize","returnType":"long","args":[],"exceptions":[]},"java.lang.String getOwner()":{"name":"getOwner","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getPath()":{"name":"getPath","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void setSymlink(org.apache.hadoop.fs.Path)":{"name":"setSymlink","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"long getAccessTime()":{"name":"getAccessTime","returnType":"long","args":[],"exceptions":[]},"boolean isDir()":{"name":"isDir","returnType":"boolean","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"boolean isEncr
 ypted()":{"name":"isEncrypted","returnType":"boolean","args":[],"exceptions":[]},"long getLen()":{"name":"getLen","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void setPath(org.apache.hadoop.fs.Path)":{"name":"setPath","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.Path getSymlink() throws java.io.IOException":{"name":"getSymlink","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":["java.io.IOException"]},"short getReplication()":{"name":"getReplication","returnType":"short","args":[],"exceptions":[]},"boolean isDirectory()":{"name":"isDirectory","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getGroup()":{"name":"getGroup","returnTy
 pe":"java.lang.String","args":[],"exceptions":[]},"boolean isSymlink()":{"name":"isSymlink","returnType":"boolean","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"long getModificationTime()":{"name":"getModificationTime","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.util.PureJavaCrc32":{"name":"org.apache.hadoop.util.PureJavaCrc32","methods":{"void update([B, int, int)":{"name":"update","returnType":"void","args":["[B","int","int"],"exceptions":[]},"long getValue()":{"name":"getValue","returnType":"long","args":[],"exceptions":[]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"void update(int)":{"name":"update","returnType":"void","args":["int"],"exceptions":[]}}},"org.apache.hadoop.fs.Trash":{"name":"org.apache.hadoop.fs.Trash","methods":{"java.lang.Runnable getEmptier() throws j
 ava.io.IOException":{"name":"getEmptier","returnType":"java.lang.Runnable","args":[],"exceptions":["java.io.IOException"]},"boolean moveToTrash(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveToTrash","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void expunge() throws java.io.IOException":{"name":"expunge","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"boolean moveToAppropriateTrash(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"moveToAppropriateTrash","returnType":"boolean","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.fs.Path","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void checkpoint() throws java.io.IOException":{"name":"checkpoint","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"boolean isEnabled()":{"name":"isEnabled","returnT
 ype":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.record.RecordComparator":{"name":"org.apache.hadoop.record.RecordComparator","methods":{"int compare([B, int, int, [B, int, int)":{"name":"compare","returnType":"int","args":["[B","int","int","[B","int","int"],"exceptions":[]},"void define(java.lang.Class, org.apache.hadoop.record.RecordComparator)":{"name":"define","returnType":"void","args":["java.lang.Class","org.apache.hadoop.record.RecordComparator"],"exceptions":[]}}},"org.apache.hadoop.record.meta.RecordTypeInfo":{"name":"org.apache.hadoop.record.meta.RecordTypeInfo","methods":{"void setName(java.lang.String)":{"name":"setName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.Collection getFieldTypeInfos()":{"name":"getFieldTypeInfos","returnType":"java.util.Collection","args":[],"exceptions":[]},"void serialize(org.apache.hadoop.rec
 ord.RecordOutput, java.lang.String) throws java.io.IOException":{"name":"serialize","returnType":"void","args":["org.apache.hadoop.record.RecordOutput","java.lang.String"],"exceptions":["java.io.IOException"]},"void deserialize(org.apache.hadoop.record.RecordInput, java.lang.String) throws java.io.IOException":{"name":"deserialize","returnType":"void","args":["org.apache.hadoop.record.RecordInput","java.lang.String"],"exceptions":["java.io.IOException"]},"void addField(java.lang.String, org.apache.hadoop.record.meta.TypeID)":{"name":"addField","returnType":"void","args":["java.lang.String","org.apache.hadoop.record.meta.TypeID"],"exceptions":[]},"int compareTo(java.lang.Object) throws java.lang.ClassCastException":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":["java.lang.ClassCastException"]},"org.apache.hadoop.record.meta.RecordTypeInfo getNestedStructTypeInfo(java.lang.String)":{"name":"getNestedStructTypeInfo","returnType":"org.apache.hadoop.reco
 rd.meta.RecordTypeInfo","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.conf.Configuration":{"name":"org.apache.hadoop.conf.Configuration","methods":{"void addResource(org.apache.hadoop.fs.Path)":{"name":"addResource","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"java.util.Set getFinalParameters()":{"name":"getFinalParameters","returnType":"java.util.Set","args":[],"exceptions":[]},"java.lang.String getTrimmed(java.lang.String, java.lang.String)":{"name":"getTrimmed","returnType":"java.lang.String","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void setPattern(java.lang.String, java.util.regex.Pattern)":{"name":"setPattern","returnType":"void","args":["java.lang.String","java.util.regex.Pattern"],"exceptions":[]},"int size()":{"name":"size","returnType":"int","args":[],"exceptions":[]},"void addResource(org.apache.hadoop.conf.Configuration)":{"name":"addResource","returnType":"void","args":["org.apache.hadoop.conf.Conf
 iguration"],"exceptions":[]},"java.util.List getInstances(java.lang.String, java.lang.Class)":{"name":"getInstances","returnType":"java.util.List","args":["java.lang.String","java.lang.Class"],"exceptions":[]},"void addResource(java.net.URL)":{"name":"addResource","returnType":"void","args":["java.net.URL"],"exceptions":[]},"void setFloat(java.lang.String, float)":{"name":"setFloat","returnType":"void","args":["java.lang.String","float"],"exceptions":[]},"void set(java.lang.String, java.lang.String, java.lang.String)":{"name":"set","returnType":"void","args":["java.lang.String","java.lang.String","java.lang.String"],"exceptions":[]},"void setBooleanIfUnset(java.lang.String, boolean)":{"name":"setBooleanIfUnset","returnType":"void","args":["java.lang.String","boolean"],"exceptions":[]},"void reloadConfiguration()":{"name":"reloadConfiguration","returnType":"void","args":[],"exceptions":[]},"java.util.regex.Pattern getPattern(java.lang.String, java.util.regex.Pattern)":{"name":"getPat
 tern","returnType":"java.util.regex.Pattern","args":["java.lang.String","java.util.regex.Pattern"],"exceptions":[]},"java.net.InetSocketAddress updateConnectAddr(java.lang.String, java.net.InetSocketAddress)":{"name":"updateConnectAddr","returnType":"java.net.InetSocketAddress","args":["java.lang.String","java.net.InetSocketAddress"],"exceptions":[]},"java.lang.String get(java.lang.String, java.lang.String)":{"name":"get","returnType":"java.lang.String","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void setDeprecatedProperties()":{"name":"setDeprecatedProperties","returnType":"void","args":[],"exceptions":[]},"boolean onlyKeyExists(java.lang.String)":{"name":"onlyKeyExists","returnType":"boolean","args":["java.lang.String"],"exceptions":[]},"java.util.Iterator iterator()":{"name":"iterator","returnType":"java.util.Iterator","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getLocalPath(java.lang.String, java.lang.String) throws java.io.IOException":{"name":"ge
 tLocalPath","returnType":"org.apache.hadoop.fs.Path","args":["java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.Class getClassByName(java.lang.String) throws java.lang.ClassNotFoundException":{"name":"getClassByName","returnType":"java.lang.Class","args":["java.lang.String"],"exceptions":["java.lang.ClassNotFoundException"]},"java.io.InputStream getConfResourceAsInputStream(java.lang.String)":{"name":"getConfResourceAsInputStream","returnType":"java.io.InputStream","args":["java.lang.String"],"exceptions":[]},"[Ljava.lang.String; getTrimmedStrings(java.lang.String)":{"name":"getTrimmedStrings","returnType":"[Ljava.lang.String;","args":["java.lang.String"],"exceptions":[]},"void writeXml(java.io.Writer) throws java.io.IOException":{"name":"writeXml","returnType":"void","args":["java.io.Writer"],"exceptions":["java.io.IOException"]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]},"java.net.URL getResource(java.lang.Str
 ing)":{"name":"getResource","returnType":"java.net.URL","args":["java.lang.String"],"exceptions":[]},"java.net.InetSocketAddress updateConnectAddr(java.lang.String, java.lang.String, java.lang.String, java.net.InetSocketAddress)":{"name":"updateConnectAddr","returnType":"java.net.InetSocketAddress","args":["java.lang.String","java.lang.String","java.lang.String","java.net.InetSocketAddress"],"exceptions":[]},"boolean getBoolean(java.lang.String, boolean)":{"name":"getBoolean","returnType":"boolean","args":["java.lang.String","boolean"],"exceptions":[]},"void main([Ljava.lang.String;) throws java.lang.Exception":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"java.lang.Enum getEnum(java.lang.String, java.lang.Enum)":{"name":"getEnum","returnType":"java.lang.Enum","args":["java.lang.String","java.lang.Enum"],"exceptions":[]},"void set(java.lang.String, java.lang.String)":{"name":"set","returnType":"void","args":["java.lang.Strin
 g","java.lang.String"],"exceptions":[]},"void setEnum(java.lang.String, java.lang.Enum)":{"name":"setEnum","returnType":"void","args":["java.lang.String","java.lang.Enum"],"exceptions":[]},"void addDeprecation(java.lang.String, java.lang.String)":{"name":"addDeprecation","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":[]},"[Ljava.lang.Class; getClasses(java.lang.String, [Ljava.lang.Class;)":{"name":"getClasses","returnType":"[Ljava.lang.Class;","args":["java.lang.String","[Ljava.lang.Class;"],"exceptions":[]},"float getFloat(java.lang.String, float)":{"name":"getFloat","returnType":"float","args":["java.lang.String","float"],"exceptions":[]},"long getLongBytes(java.lang.String, long)":{"name":"getLongBytes","returnType":"long","args":["java.lang.String","long"],"exceptions":[]},"java.lang.Class getClassByNameOrNull(java.lang.String)":{"name":"getClassByNameOrNull","returnType":"java.lang.Class","args":["java.lang.String"],"exceptions":[]},"void setStr
 ings(java.lang.String, [Ljava.lang.String;)":{"name":"setStrings","returnType":"void","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"void addDeprecations([Lorg.apache.hadoop.conf.Configuration$DeprecationDelta;)":{"name":"addDeprecations","returnType":"void","args":["[Lorg.apache.hadoop.conf.Configuration$DeprecationDelta;"],"exceptions":[]},"[Ljava.lang.String; getPropertySources(java.lang.String)":{"name":"getPropertySources","returnType":"[Ljava.lang.String;","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.conf.Configuration$IntegerRanges getRange(java.lang.String, java.lang.String)":{"name":"getRange","returnType":"org.apache.hadoop.conf.Configuration$IntegerRanges","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void setLong(java.lang.String, long)":{"name":"setLong","returnType":"void","args":["java.lang.String","long"],"exceptions":[]},"void setQuietMode(boolean)":{"name":"setQuietMode","returnType":"void","args":["boolean
 "],"exceptions":[]},"void setClassLoader(java.lang.ClassLoader)":{"name":"setClassLoader","returnType":"void","args":["java.lang.ClassLoader"],"exceptions":[]},"[C getPassword(java.lang.String) throws java.io.IOException":{"name":"getPassword","returnType":"[C","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void setTimeDuration(java.lang.String, long, java.util.concurrent.TimeUnit)":{"name":"setTimeDuration","returnType":"void","args":["java.lang.String","long","java.util.concurrent.TimeUnit"],"exceptions":[]},"void setDouble(java.lang.String, double)":{"name":"setDouble","returnType":"void","args":["java.lang.String","double"],"exceptions":[]},"void addDeprecation(java.lang.String, [Ljava.lang.String;, java.lang.String)":{"name":"addDeprecation","returnType":"void","args":["java.lang.String","[Ljava.lang.String;","java.lang.String"],"exceptions":[]},"java.lang.String get(java.lang.String)":{"name":"get","returnType":"java.lang.String","args":["java.lang.String"]
 ,"exceptions":[]},"java.lang.Class getClass(java.lang.String, java.lang.Class)":{"name":"getClass","returnType":"java.lang.Class","args":["java.lang.String","java.lang.Class"],"exceptions":[]},"void setClass(java.lang.String, java.lang.Class, java.lang.Class)":{"name":"setClass","returnType":"void","args":["java.lang.String","java.lang.Class","java.lang.Class"],"exceptions":[]},"java.util.Collection getStringCollection(java.lang.String)":{"name":"getStringCollection","returnType":"java.util.Collection","args":["java.lang.String"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.io.File getFile(java.lang.String, java.lang.String) throws java.io.IOException":{"name":"getFile","returnType":"java.io.File","args":["java.lang.String","java.lang.St
 ring"],"exceptions":["java.io.IOException"]},"double getDouble(java.lang.String, double)":{"name":"getDouble","returnType":"double","args":["java.lang.String","double"],"exceptions":[]},"void setBoolean(java.lang.String, boolean)":{"name":"setBoolean","returnType":"void","args":["java.lang.String","boolean"],"exceptions":[]},"boolean isDeprecated(java.lang.String)":{"name":"isDeprecated","returnType":"boolean","args":["java.lang.String"],"exceptions":[]},"java.lang.String getTrimmed(java.lang.String)":{"name":"getTrimmed","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":[]},"void setInt(java.lang.String, int)":{"name":"setInt","returnType":"void","args":["java.lang.String","int"],"exceptions":[]},"void addDeprecation(java.lang.String, java.lang.String, java.lang.String)":{"name":"addDeprecation","returnType":"void","args":["java.lang.String","java.lang.String","java.lang.String"],"exceptions":[]},"long getLong(java.lang.String, long)":{"name":"getLong","retur
 nType":"long","args":["java.lang.String","long"],"exceptions":[]},"void addDeprecation(java.lang.String, [Ljava.lang.String;)":{"name":"addDeprecation","returnType":"void","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"void setAllowNullValueProperties(boolean)":{"name":"setAllowNullValueProperties","returnType":"void","args":["boolean"],"exceptions":[]},"java.util.Collection getTrimmedStringCollection(java.lang.String)":{"name":"getTrimmedStringCollection","returnType":"java.util.Collection","args":["java.lang.String"],"exceptions":[]},"[Ljava.lang.String; getStrings(java.lang.String, [Ljava.lang.String;)":{"name":"getStrings","returnType":"[Ljava.lang.String;","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"java.io.Reader getConfResourceAsReader(java.lang.String)":{"name":"getConfResourceAsReader","returnType":"java.io.Reader","args":["java.lang.String"],"exceptions":[]},"long getTimeDuration(java.lang.String, long, java.util.concurrent.Time
 Unit)":{"name":"getTimeDuration","returnType":"long","args":["java.lang.String","long","java.util.concurrent.TimeUnit"],"exceptions":[]},"void addResource(java.io.InputStream)":{"name":"addResource","returnType":"void","args":["java.io.InputStream"],"exceptions":[]},"java.net.InetSocketAddress getSocketAddr(java.lang.String, java.lang.String, java.lang.String, int)":{"name":"getSocketAddr","returnType":"java.net.InetSocketAddress","args":["java.lang.String","java.lang.String","java.lang.String","int"],"exceptions":[]},"void dumpDeprecatedKeys()":{"name":"dumpDeprecatedKeys","returnType":"void","args":[],"exceptions":[]},"[I getInts(java.lang.String)":{"name":"getInts","returnType":"[I","args":["java.lang.String"],"exceptions":[]},"void addResource(java.lang.String)":{"name":"addResource","returnType":"void","args":["java.lang.String"],"exceptions":[]},"[Ljava.lang.String; getTrimmedStrings(java.lang.String, [Ljava.lang.String;)":{"name":"getTrimmedStrings","returnType":"[Ljava.lang.
 String;","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"java.lang.Class getClass(java.lang.String, java.lang.Class, java.lang.Class)":{"name":"getClass","returnType":"java.lang.Class","args":["java.lang.String","java.lang.Class","java.lang.Class"],"exceptions":[]},"void setIfUnset(java.lang.String, java.lang.String)":{"name":"setIfUnset","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void unset(java.lang.String)":{"name":"unset","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void dumpConfiguration(org.apache.hadoop.conf.Configuration, java.io.Writer) throws java.io.IOException":{"name":"dumpConfiguration","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.io.Writer"],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; getStrings(java.lang.String)":{"name":"getStrings","returnType":"[Ljava.lang.String;","args":["java.lang.String"],"exceptions":[]},"void addResource(java.io.Input
 Stream, java.lang.String)":{"name":"addResource","returnType":"void","args":["java.io.InputStream","java.lang.String"],"exceptions":[]},"java.util.Map getValByRegex(java.lang.String)":{"name":"getValByRegex","returnType":"java.util.Map","args":["java.lang.String"],"exceptions":[]},"void setSocketAddr(java.lang.String, java.net.InetSocketAddress)":{"name":"setSocketAddr","returnType":"void","args":["java.lang.String","java.net.InetSocketAddress"],"exceptions":[]},"int getInt(java.lang.String, int)":{"name":"getInt","returnType":"int","args":["java.lang.String","int"],"exceptions":[]},"void writeXml(java.io.OutputStream) throws java.io.IOException":{"name":"writeXml","returnType":"void","args":["java.io.OutputStream"],"exceptions":["java.io.IOException"]},"java.lang.ClassLoader getClassLoader()":{"name":"getClassLoader","returnType":"java.lang.ClassLoader","args":[],"exceptions":[]},"void addDefaultResource(java.lang.String)":{"name":"addDefaultResource","returnType":"void","args":["j
 ava.lang.String"],"exceptions":[]},"java.net.InetSocketAddress getSocketAddr(java.lang.String, java.lang.String, int)":{"name":"getSocketAddr","returnType":"java.net.InetSocketAddress","args":["java.lang.String","java.lang.String","int"],"exceptions":[]},"boolean hasWarnedDeprecation(java.lang.String)":{"name":"hasWarnedDeprecation","returnType":"boolean","args":["java.lang.String"],"exceptions":[]},"java.lang.String getRaw(java.lang.String)":{"name":"getRaw","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.WritableFactories":{"name":"org.apache.hadoop.io.WritableFactories","methods":{"org.apache.hadoop.io.WritableFactory getFactory(java.lang.Class)":{"name":"getFactory","returnType":"org.apache.hadoop.io.WritableFactory","args":["java.lang.Class"],"exceptions":[
 ]},"void setFactory(java.lang.Class, org.apache.hadoop.io.WritableFactory)":{"name":"setFactory","returnType":"void","args":["java.lang.Class","org.apache.hadoop.io.WritableFactory"],"exceptions":[]},"org.apache.hadoop.io.Writable newInstance(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"newInstance","returnType":"org.apache.hadoop.io.Writable","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Writable newInstance(java.lang.Class)":{"name":"newInstance","returnType":"org.apache.hadoop.io.Writable","args":["java.lang.Class"],"exceptions":[]}}},"org.apache.hadoop.io.SetFile":{"name":"org.apache.hadoop.io.SetFile","methods":{}},"org.apache.hadoop.record.compiler.JString":{"name":"org.apache.hadoop.record.compiler.JString","methods":{}},"org.apache.hadoop.record.compiler.JBoolean":{"name":"org.apache.hadoop.record.compiler.JBoolean","methods":{}},"org.apache.hadoop.io.ShortWritable":{"name":"org.apache.hadoop.io.S
 hortWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.ShortWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.ShortWritable"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"short get()":{"name":"get","returnType":"short","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void set(short)":{"name":"set","returnType":"void","args":["short"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":
 {"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.InvalidPathException":{"name":"org.apache.hadoop.fs.InvalidPathException","methods":{}},"org.apache.hadoop.record.compiler.JVector":{"name":"org.apache.hadoop.record.compiler.JVector","methods":{}},"org.apache.hadoop.io.ArrayWritable":{"name":"org.apache.hadoop.io.ArrayWritable","methods":{"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.io.Writable; get()":{"name":"get","returnType":"[Lorg.apache.hadoop.io.Writable;","args":[],"exceptions":[]},"void set([Lorg.apache.hadoop.io.Writable;)":{"name":"set","returnType":"void","args":["[Lorg.apache.hadoop.io.Writable;"],"exceptions":[]},"[Ljava.lang.String; toStrings()":{"name":"toStrings","returnType":"[Ljava.lang.String;","args":[],"exceptions":[]},"java.lang.Class getValu
 eClass()":{"name":"getValueClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"java.lang.Object toArray()":{"name":"toArray","returnType":"java.lang.Object","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.IntWritable":{"name":"org.apache.hadoop.io.IntWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void set(int)":{"name":"set","returnType":"void","args":["int"],"exceptions":[]},"int compareTo(org.apache.hadoop.io.IntWritable)":{"name":"compareTo","returnType":"int","args":["or
 g.apache.hadoop.io.IntWritable"],"exceptions":[]},"int get()":{"name":"get","returnType":"int","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.TwoDArrayWritable":{"name":"org.apache.hadoop.io.TwoDArrayWritable","methods":{"[[Lorg.apache.hadoop.io.Writable; get()":{"name":"get","returnType":"[[Lorg.apache.hadoop.io.Writable;","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void set([[Lorg.apache.hadoop.io.Writable;)":{"name":"set","retur
 nType":"void","args":["[[Lorg.apache.hadoop.io.Writable;"],"exceptions":[]},"java.lang.Object toArray()":{"name":"toArray","returnType":"java.lang.Object","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.FSDataInputStream":{"name":"org.apache.hadoop.fs.FSDataInputStream","methods":{"void readFully(long, [B) throws java.io.IOException":{"name":"readFully","returnType":"void","args":["long","[B"],"exceptions":["java.io.IOException"]},"java.nio.ByteBuffer read(org.apache.hadoop.io.ByteBufferPool, int) throws java.lang.UnsupportedOperationException, java.io.IOException":{"name":"read","returnType":"java.nio.ByteBuffer","args":["org.apache.hadoop.io.ByteBufferPool","int"],"exceptions":["java.lang.UnsupportedOperationException","java.io.IOException"]},"void readFully(long, [B, int, int) throws java.io.IOException":{"n
 ame":"readFully","returnType":"void","args":["long","[B","int","int"],"exceptions":["java.io.IOException"]},"void unbuffer()":{"name":"unbuffer","returnType":"void","args":[],"exceptions":[]},"void seek(long) throws java.io.IOException":{"name":"seek","returnType":"void","args":["long"],"exceptions":["java.io.IOException"]},"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"void setReadahead(java.lang.Long) throws java.lang.UnsupportedOperationException, java.io.IOException":{"name":"setReadahead","returnType":"void","args":["java.lang.Long"],"exceptions":["java.lang.UnsupportedOperationException","java.io.IOException"]},"void releaseBuffer(java.nio.ByteBuffer)":{"name":"releaseBuffer","returnType":"void","args":["java.nio.ByteBuffer"],"exceptions":[]},"java.io.InputStream getWrappedStream()":{"name":"getWrappedStream","returnType":"java.io.InputStream","args":[],"exceptions":[]},"java.nio.ByteBuffer read(
 org.apache.hadoop.io.ByteBufferPool, int, java.util.EnumSet) throws java.lang.UnsupportedOperationException, java.io.IOException":{"name":"read","returnType":

<TRUNCATED>

[15/50] [abbrv] bigtop git commit: Added TODOs to outline remaining work.

Posted by rv...@apache.org.
Added TODOs to outline remaining work.

(cherry picked from commit 67c51056610260ad99dd2f194a33ec7af4b89c9e)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/a41bb2dd
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/a41bb2dd
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/a41bb2dd

Branch: refs/heads/master
Commit: a41bb2dd74e526512ca5e8eb26e0a8d379cec56f
Parents: ab4f414
Author: Alan Gates <ga...@hortonworks.com>
Authored: Tue Nov 8 07:42:28 2016 -0800
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:13 2017 -0700

----------------------------------------------------------------------
 .../src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java | 6 ++++++
 .../test/java/org/odpi/specs/runtime/hive/TestHCatalog.java   | 7 ++++++-
 2 files changed, 12 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/a41bb2dd/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
index 6fcfe37..a6ff375 100644
--- a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
+++ b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
@@ -72,6 +72,12 @@ public class HCatalogMR extends Configured implements Tool {
     HCatOutputFormat.setSchema(job, HCatSchemaUtils.getHCatSchema(outputSchemaStr));
     job.setOutputFormatClass(HCatOutputFormat.class);
 
+    // TODO All four of these jars need to be in the job's distributed cache for the job to
+    // succeed.  I loaded them into a known location in HDFS to get them into the cache.  There may
+    // be a way to load them from a file on the gateway machine.  We could also add an 'hdfs dfs
+    // -put' operation to a gradle step as part of the build, so that the jars are picked up from
+    // the distribution and placed in a known location in HDFS from where they can be pulled into
+    // the distributed cache.
     job.addCacheArchive(new URI("hdfs:/user/gates/hive-hcatalog-core-1.2.1.jar"));
     job.addCacheArchive(new URI("hdfs:/user/gates/hive-metastore-1.2.1.jar"));
     job.addCacheArchive(new URI("hdfs:/user/gates/hive-exec-1.2.1.jar"));

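The TODO above describes the intended approach: stage the needed jars in a known HDFS location and register them with the job's distributed cache. The following is only a sketch of that idea, not code from the commit; the staging directory and the list of jars are placeholder assumptions, and a gradle step (or an 'hdfs dfs -put') could do the copy instead.

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;

public class CacheJarStager {
  // Hypothetical staging directory; the real location would come from the build or a test property.
  private static final String STAGING_DIR = "/tmp/odpi-test-jars";

  // Copies each local jar into HDFS and registers it as a cache archive on the job.
  public static void stageAndCache(Job job, String... localJars) throws IOException {
    Configuration conf = job.getConfiguration();
    FileSystem fs = FileSystem.get(conf);
    Path staging = new Path(STAGING_DIR);
    fs.mkdirs(staging);
    for (String localJar : localJars) {
      Path src = new Path(localJar);
      Path dst = new Path(staging, src.getName());
      // Programmatic equivalent of an 'hdfs dfs -put' from the gateway machine.
      fs.copyFromLocalFile(false, true, src, dst);
      URI cacheUri = fs.makeQualified(dst).toUri();
      job.addCacheArchive(cacheUri);
    }
  }
}

With a helper like this, the hard-coded hdfs:/user/gates/... URIs above could be replaced by whatever location the build chooses to stage the four jars into.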
http://git-wip-us.apache.org/repos/asf/bigtop/blob/a41bb2dd/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
index bb237d8..87e3eb0 100644
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
+++ b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
@@ -118,7 +118,7 @@ public class TestHCatalog {
         new HCatFieldSchema("word", HCatFieldSchema.Type.STRING, ""),
         new HCatFieldSchema("count", HCatFieldSchema.Type.INT, "")));
 
-    // TODO Could I use HCatWriter hear and the reader to read it?
+    // LATER Could I use HCatWriter here and the reader to read it?
     // Write some stuff into a file in the location of the table
     table = client.getTable("default", inputTable);
     String inputFile = table.getSd().getLocation() + "/input";
@@ -132,12 +132,17 @@ public class TestHCatalog {
     out.close();
 
     Map<String, String> env = new HashMap<>();
+    // TODO These need to be set from the environment rather than hard-wired.
     env.put("HADOOP_HOME","/Users/gates/grid/odpi-testing/hadoop-2.7.3");
     env.put("HADOOP_CLASSPATH", "/Users/gates/grid/odpi-testing/apache-hive-1.2.1-bin/hcatalog/share/hcatalog/hive-hcatalog-core-1.2.1.jar");
     env.put("HIVE_HOME", "/Users/gates/grid/odpi-testing/apache-hive-1.2.1-bin");
     Map<String, String> results = HiveHelper.execCommand(new CommandLine("/Users/gates/grid/odpi-testing/apache-hive-1.2.1-bin/bin/hive")
         .addArgument("--service")
         .addArgument("jar")
+        // TODO This is the jar built by gradle, but I didn't know how to take the jar built in
+        // the build phase and reference it in the test phase.  Perhaps a move step could be added
+        // in between so the jar ends up in a known location that can be referenced here, or maybe
+        // gradle can pass in its working directory so that we can reference the jar from there.
         .addArgument("/Users/gates/git/bigtop/runtime-1.2.0-SNAPSHOT.jar")
         .addArgument(HCatalogMR.class.getName())
         .addArgument(inputTable)

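The TODO above asks for these settings to come from the environment instead of being hard-wired to one developer's machine. A rough sketch of that idea follows, under the assumption that the test would consult system properties first and the process environment second; the property names and fallback paths are illustrative only, not ones the framework defines.

import java.util.HashMap;
import java.util.Map;

public class TestEnvResolver {
  // Resolve a value from a system property, then an environment variable, then a default.
  private static String resolve(String property, String envVar, String fallback) {
    String value = System.getProperty(property);
    if (value == null) {
      value = System.getenv(envVar);
    }
    return value != null ? value : fallback;
  }

  public static Map<String, String> hadoopEnv() {
    Map<String, String> env = new HashMap<>();
    env.put("HADOOP_HOME", resolve("odpi.test.hadoop.home", "HADOOP_HOME", "/usr/lib/hadoop"));
    env.put("HIVE_HOME", resolve("odpi.test.hive.home", "HIVE_HOME", "/usr/lib/hive"));
    env.put("HADOOP_CLASSPATH", resolve("odpi.test.hadoop.classpath", "HADOOP_CLASSPATH", ""));
    return env;
  }
}

The /Users/gates/... paths in the test would then reduce to lookups against this map, and the hive binary could be located relative to HIVE_HOME.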

[47/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/a05d3813/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
index 06af1da..993dad0 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
@@ -27,306 +27,306 @@ import java.sql.Statement;
 // This does not test every option that Hive supports, but does try to touch the major
 // options, especially anything unique to Hive.  See each test for areas tested and not tested.
 public class TestSql extends JdbcConnector {
-  private static final Log LOG = LogFactory.getLog(TestSql.class.getName());
+    private static final Log LOG = LogFactory.getLog(TestSql.class.getName());
 
-  @Test
-  public void db() throws SQLException {
-    final String db1 = "bigtop_sql_db1";
-    final String db2 = "bigtop_sql_db2";
-    try (Statement stmt = conn.createStatement()) {
-      stmt.execute("drop database if exists " + db1 + " cascade");
+    @Test
+    public void db() throws SQLException {
+        final String db1 = "bigtop_sql_db1";
+        final String db2 = "bigtop_sql_db2";
+        try (Statement stmt = conn.createStatement()) {
+            stmt.execute("drop database if exists " + db1 + " cascade");
 
-      // Simple create database
-      stmt.execute("create database " + db1);
-      stmt.execute("drop database " + db1);
+            // Simple create database
+            stmt.execute("create database " + db1);
+            stmt.execute("drop database " + db1);
 
-      stmt.execute("drop schema if exists " + db2 + " cascade");
+            stmt.execute("drop schema if exists " + db2 + " cascade");
 
-      String location = getProperty(LOCATION, "a writable directory in HDFS");
+            String location = getProperty(LOCATION, "a writable directory in HDFS");
 
-      // All the bells and whistles
-      stmt.execute("create schema if not exists " + db2 + " comment 'a db' location '" + location +
-          "' with dbproperties ('a' = 'b')");
+            // All the bells and whistles
+            stmt.execute("create schema if not exists " + db2 + " comment 'a db' location '" + location +
+                    "' with dbproperties ('a' = 'b')");
 
-      stmt.execute("alter database " + db2 + " set dbproperties ('c' = 'd')");
+            stmt.execute("alter database " + db2 + " set dbproperties ('c' = 'd')");
 
-      stmt.execute("drop database " + db2 + " restrict");
+            stmt.execute("drop database " + db2 + " restrict");
+        }
     }
-  }
-
-  @Test
-  public void table() throws SQLException {
-    final String table1 = "bigtop_sql_table1";
-    final String table2 = "bigtop_sql_table2";
-    final String table3 = "bigtop_sql_table3";
-    final String table4 = "bigtop_sql_table4";
-    final String table5 = "bigtop_sql_table5";
-
-    try (Statement stmt = conn.createStatement()) {
-      stmt.execute("drop table if exists " + table1);
-      stmt.execute("drop table if exists " + table2);
-      stmt.execute("drop table if exists " + table3);
-      stmt.execute("drop table if exists " + table4);
-      stmt.execute("drop table if exists " + table5);
-
-      String location = getProperty(LOCATION, "a writable directory in HDFS");
-      stmt.execute("create external table " + table1 + "(a int, b varchar(32)) location '" +
-          location + "'");
-
-      // With a little bit of everything, except partitions, we'll do those below
-      stmt.execute("create table if not exists " + table2 +
-          "(c1 tinyint," +
-          " c2 smallint," +
-          " c3 int comment 'a column comment'," +
-          " c4 bigint," +
-          " c5 float," +
-          " c6 double," +
-          " c7 decimal," +
-          " c8 decimal(12)," +
-          " c9 decimal(8,2)," +
-          " c10 timestamp," +
-          " c11 date," +
-          " c12 string," +
-          " c13 varchar(120)," +
-          " c14 char(10)," +
-          " c15 boolean," +
-          " c16 binary," +
-          " c17 array<string>," +
-          " c18 map <string, string>," +
-          " c19 struct<s1:int, s2:bigint>," +
-          " c20 uniontype<int, string>) " +
-          "comment 'table comment'" +
-          "clustered by (c1) sorted by (c2) into 10 buckets " +
-          "stored as orc " +
-          "tblproperties ('a' = 'b')");
-
-      // Not testing SKEWED BY, ROW FORMAT, STORED BY (storage handler
-
-      stmt.execute("create temporary table " + table3 + " like " + table2);
-
-      stmt.execute("insert into " + table1 + " values (3, 'abc'), (4, 'def')");
-
-      stmt.execute("create table " + table4 + " as select a, b from " + table1);
-
-      stmt.execute("truncate table " + table4);
-
-      stmt.execute("alter table " + table4 + " rename to " + table5);
-      stmt.execute("alter table " + table2 + " set tblproperties ('c' = 'd')");
-
-      // Not testing alter of clustered or sorted by, because that's suicidal
-      // Not testing alter of skewed or serde properties since we didn't test it for create
-      // above.
-
-      stmt.execute("drop table " + table1 + " purge");
-      stmt.execute("drop table " + table2);
-      stmt.execute("drop table " + table3);
-      stmt.execute("drop table " + table5);
+
+    @Test
+    public void table() throws SQLException {
+        final String table1 = "bigtop_sql_table1";
+        final String table2 = "bigtop_sql_table2";
+        final String table3 = "bigtop_sql_table3";
+        final String table4 = "bigtop_sql_table4";
+        final String table5 = "bigtop_sql_table5";
+
+        try (Statement stmt = conn.createStatement()) {
+            stmt.execute("drop table if exists " + table1);
+            stmt.execute("drop table if exists " + table2);
+            stmt.execute("drop table if exists " + table3);
+            stmt.execute("drop table if exists " + table4);
+            stmt.execute("drop table if exists " + table5);
+
+            String location = getProperty(LOCATION, "a writable directory in HDFS");
+            stmt.execute("create external table " + table1 + "(a int, b varchar(32)) location '" +
+                    location + "'");
+
+            // With a little bit of everything, except partitions, we'll do those below
+            stmt.execute("create table if not exists " + table2 +
+                    "(c1 tinyint," +
+                    " c2 smallint," +
+                    " c3 int comment 'a column comment'," +
+                    " c4 bigint," +
+                    " c5 float," +
+                    " c6 double," +
+                    " c7 decimal," +
+                    " c8 decimal(12)," +
+                    " c9 decimal(8,2)," +
+                    " c10 timestamp," +
+                    " c11 date," +
+                    " c12 string," +
+                    " c13 varchar(120)," +
+                    " c14 char(10)," +
+                    " c15 boolean," +
+                    " c16 binary," +
+                    " c17 array<string>," +
+                    " c18 map <string, string>," +
+                    " c19 struct<s1:int, s2:bigint>," +
+                    " c20 uniontype<int, string>) " +
+                    "comment 'table comment'" +
+                    "clustered by (c1) sorted by (c2) into 10 buckets " +
+                    "stored as orc " +
+                    "tblproperties ('a' = 'b')");
+
+                    // Not testing SKEWED BY, ROW FORMAT, STORED BY (storage handlers)
+
+            stmt.execute("create temporary table " + table3 + " like " + table2);
+
+            stmt.execute("insert into " + table1 + " values (3, 'abc'), (4, 'def')");
+
+            stmt.execute("create table " + table4 + " as select a, b from " + table1);
+
+            stmt.execute("truncate table " + table4);
+
+            stmt.execute("alter table " + table4 + " rename to " + table5);
+            stmt.execute("alter table " + table2 + " set tblproperties ('c' = 'd')");
+
+            // Not testing alter of clustered or sorted by, because that's suicidal
+            // Not testing alter of skewed or serde properties since we didn't test it for create
+            // above.
+
+            stmt.execute("drop table " + table1 + " purge");
+            stmt.execute("drop table " + table2);
+            stmt.execute("drop table " + table3);
+            stmt.execute("drop table " + table5);
+        }
     }
-  }
 
-  @Test
-  public void partitionedTable() throws SQLException {
-    final String table1 = "bigtop_sql_ptable1";
-    try (Statement stmt = conn.createStatement()) {
-      stmt.execute("drop table if exists " + table1);
+    @Test
+    public void partitionedTable() throws SQLException {
+        final String table1 = "bigtop_sql_ptable1";
+        try (Statement stmt = conn.createStatement()) {
+            stmt.execute("drop table if exists " + table1);
 
-      stmt.execute("create table " + table1 +
-          "(c1 int," +
-          " c2 varchar(32))" +
-          "partitioned by (p1 string comment 'a partition column')" +
-          "stored as orc");
+            stmt.execute("create table " + table1 +
+                    "(c1 int," +
+                    " c2 varchar(32))" +
+                    "partitioned by (p1 string comment 'a partition column')" +
+                    "stored as orc");
 
-      stmt.execute("alter table " + table1 + " add partition (p1 = 'a')");
-      stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (1, 'abc')");
-      stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (2, 'def')");
-      stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (3, 'ghi')");
-      stmt.execute("alter table " + table1 + " partition (p1 = 'a') concatenate");
-      stmt.execute("alter table " + table1 + " touch partition (p1 = 'a')");
+            stmt.execute("alter table " + table1 + " add partition (p1 = 'a')");
+            stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (1, 'abc')");
+            stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (2, 'def')");
+            stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (3, 'ghi')");
+            stmt.execute("alter table " + table1 + " partition (p1 = 'a') concatenate");
+            stmt.execute("alter table " + table1 + " touch partition (p1 = 'a')");
 
-      stmt.execute("alter table " + table1 + " add columns (c3 float)");
-      stmt.execute("alter table " + table1 + " drop partition (p1 = 'a')");
+            stmt.execute("alter table " + table1 + " add columns (c3 float)");
+            stmt.execute("alter table " + table1 + " drop partition (p1 = 'a')");
 
-      // Not testing rename partition, exchange partition, msck repair, archive/unarchive,
-      // set location, enable/disable no_drop/offline, compact (because not everyone may have
-      // ACID on), change column
+            // Not testing rename partition, exchange partition, msck repair, archive/unarchive,
+            // set location, enable/disable no_drop/offline, compact (because not everyone may have
+            // ACID on), change column
 
-      stmt.execute("drop table " + table1);
+            stmt.execute("drop table " + table1);
 
+        }
     }
-  }
-
-  @Test
-  public void view() throws SQLException {
-    final String table1 = "bigtop_sql_vtable1";
-    final String view1 = "bigtop_sql_view1";
-    final String view2 = "bigtop_sql_view2";
-    try (Statement stmt = conn.createStatement()) {
-      stmt.execute("drop table if exists " + table1);
-      stmt.execute("drop view if exists " + view1);
-      stmt.execute("drop view if exists " + view2);
-      stmt.execute("create table " + table1 + "(a int, b varchar(32))");
-      stmt.execute("create view " + view1 + " as select a from " + table1);
-
-      stmt.execute("create view if not exists " + view2 +
-          " comment 'a view comment' " +
-          "tblproperties ('a' = 'b') " +
-          "as select b from " + table1);
-
-      stmt.execute("alter view " + view1 + " as select a, b from " + table1);
-      stmt.execute("alter view " + view2 + " set tblproperties('c' = 'd')");
-
-      stmt.execute("drop view " + view1);
-      stmt.execute("drop view " + view2);
+
+    @Test
+    public void view() throws SQLException {
+        final String table1 = "bigtop_sql_vtable1";
+        final String view1 = "bigtop_sql_view1";
+        final String view2 = "bigtop_sql_view2";
+        try (Statement stmt = conn.createStatement()) {
+            stmt.execute("drop table if exists " + table1);
+            stmt.execute("drop view if exists " + view1);
+            stmt.execute("drop view if exists " + view2);
+            stmt.execute("create table " + table1 + "(a int, b varchar(32))");
+            stmt.execute("create view " + view1 + " as select a from " + table1);
+
+            stmt.execute("create view if not exists " + view2 +
+                    " comment 'a view comment' " +
+                    "tblproperties ('a' = 'b') " +
+                    "as select b from " + table1);
+
+            stmt.execute("alter view " + view1 + " as select a, b from " + table1);
+            stmt.execute("alter view " + view2 + " set tblproperties('c' = 'd')");
+
+            stmt.execute("drop view " + view1);
+            stmt.execute("drop view " + view2);
+        }
     }
-  }
 
-  // Not testing indices because they are currently useless in Hive
-  // Not testing macros because as far as I know no one uses them
+    // Not testing indices because they are currently useless in Hive
+    // Not testing macros because as far as I know no one uses them
 
-  @Test
-  public void function() throws SQLException {
-    final String func1 = "bigtop_sql_func1";
-    final String func2 = "bigtop_sql_func2";
-    try (Statement stmt = conn.createStatement()) {
-      stmt.execute("create temporary function " + func1 +
-          " as 'org.apache.hadoop.hive.ql.udf.UDFToInteger'");
-      stmt.execute("drop temporary function " + func1);
+    @Test
+    public void function() throws SQLException {
+        final String func1 = "bigtop_sql_func1";
+        final String func2 = "bigtop_sql_func2";
+        try (Statement stmt = conn.createStatement()) {
+            stmt.execute("create temporary function " + func1 +
+                    " as 'org.apache.hadoop.hive.ql.udf.UDFToInteger'");
+            stmt.execute("drop temporary function " + func1);
 
-      stmt.execute("drop function if exists " + func2);
+            stmt.execute("drop function if exists " + func2);
 
-      stmt.execute("create function " + func2 +
-          " as 'org.apache.hadoop.hive.ql.udf.UDFToInteger'");
-      stmt.execute("drop function " + func2);
+            stmt.execute("create function " + func2 +
+                    " as 'org.apache.hadoop.hive.ql.udf.UDFToInteger'");
+            stmt.execute("drop function " + func2);
+        }
     }
-  }
-
-  // Not testing grant/revoke/roles as different vendors use different security solutions
-  // and hence different things will work here.
-
-  // This covers insert (non-partitioned, partitioned, dynamic partitions, overwrite, with
-  // values and select), and multi-insert.  Load is not tested as there's no guarantee that the
-  // test machine has access to HDFS and thus the ability to upload a file.
-  @Test
-  public void insert() throws SQLException {
-    final String table1 = "bigtop_insert_table1";
-    final String table2 = "bigtop_insert_table2";
-    try (Statement stmt = conn.createStatement()) {
-      stmt.execute("drop table if exists " + table1);
-      stmt.execute("create table " + table1 +
-          "(c1 tinyint," +
-          " c2 smallint," +
-          " c3 int," +
-          " c4 bigint," +
-          " c5 float," +
-          " c6 double," +
-          " c7 decimal(8,2)," +
-          " c8 varchar(120)," +
-          " c9 char(10)," +
-          " c10 boolean)" +
-          " partitioned by (p1 string)");
-
-      // insert with partition
-      stmt.execute("explain insert into " + table1 + " partition (p1 = 'a') values " +
-          "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true)," +
-          "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true)");
-
-      stmt.execute("set hive.exec.dynamic.partition.mode=nonstrict");
-
-      // dynamic partition
-      stmt.execute("explain insert into " + table1 + " partition (p1) values " +
-          "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true, 'b')," +
-          "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true, 'b')");
-
-      stmt.execute("drop table if exists " + table2);
-
-      stmt.execute("create table " + table2 +
-          "(c1 tinyint," +
-          " c2 smallint," +
-          " c3 int," +
-          " c4 bigint," +
-          " c5 float," +
-          " c6 double," +
-          " c7 decimal(8,2)," +
-          " c8 varchar(120)," +
-          " c9 char(10)," +
-          " c10 boolean)");
-
-      stmt.execute("explain insert into " + table2 + " values " +
-          "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true)," +
-          "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true)");
-
-      stmt.execute("explain insert overwrite table " + table2 + " select c1, c2, c3, c4, c5, c6, " +
-          "c7, c8, c9, c10 from " + table1);
-
-      // multi-insert
-      stmt.execute("from " + table1 +
-          " insert into table " + table1 + " partition (p1 = 'c') " +
-          " select c1, c2, c3, c4, c5, c6, c7, c8, c9, c10" +
-          " insert into table " + table2 + " select c1, c2, c3, c4, c5, c6, c7, c8, c9, c10");
+
+    // Not testing grant/revoke/roles as different vendors use different security solutions
+    // and hence different things will work here.
+
+    // This covers insert (non-partitioned, partitioned, dynamic partitions, overwrite, with
+    // values and select), and multi-insert.  Load is not tested as there's no guarantee that the
+    // test machine has access to HDFS and thus the ability to upload a file.
+    @Test
+    public void insert() throws SQLException {
+        final String table1 = "bigtop_insert_table1";
+        final String table2 = "bigtop_insert_table2";
+        try (Statement stmt = conn.createStatement()) {
+            stmt.execute("drop table if exists " + table1);
+            stmt.execute("create table " + table1 +
+                    "(c1 tinyint," +
+                    " c2 smallint," +
+                    " c3 int," +
+                    " c4 bigint," +
+                    " c5 float," +
+                    " c6 double," +
+                    " c7 decimal(8,2)," +
+                    " c8 varchar(120)," +
+                    " c9 char(10)," +
+                    " c10 boolean)" +
+                    " partitioned by (p1 string)");
+
+            // insert with partition
+            stmt.execute("explain insert into " + table1 + " partition (p1 = 'a') values " +
+                    "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true)," +
+                    "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true)");
+
+            stmt.execute("set hive.exec.dynamic.partition.mode=nonstrict");
+
+            // dynamic partition
+            stmt.execute("explain insert into " + table1 + " partition (p1) values " +
+                    "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true, 'b')," +
+                    "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true, 'b')");
+
+            stmt.execute("drop table if exists " + table2);
+
+            stmt.execute("create table " + table2 +
+                    "(c1 tinyint," +
+                    " c2 smallint," +
+                    " c3 int," +
+                    " c4 bigint," +
+                    " c5 float," +
+                    " c6 double," +
+                    " c7 decimal(8,2)," +
+                    " c8 varchar(120)," +
+                    " c9 char(10)," +
+                    " c10 boolean)");
+
+            stmt.execute("explain insert into " + table2 + " values " +
+                    "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true)," +
+                    "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true)");
+
+            stmt.execute("explain insert overwrite table " + table2 + " select c1, c2, c3, c4, c5, c6, " +
+                    "c7, c8, c9, c10 from " + table1);
+
+            // multi-insert
+            stmt.execute("from " + table1 +
+                    " insert into table " + table1 + " partition (p1 = 'c') " +
+                    " select c1, c2, c3, c4, c5, c6, c7, c8, c9, c10" +
+                    " insert into table " + table2 + " select c1, c2, c3, c4, c5, c6, c7, c8, c9, c10");
+        }
     }
-  }
-
-  // This tests CTEs
-  @Test
-  public void cte() throws SQLException {
-    final String table1 = "bigtop_cte_table1";
-    try (Statement stmt = conn.createStatement()) {
-      stmt.execute("drop table if exists " + table1);
-      stmt.execute("create table " + table1 + "(c1 int, c2 varchar(32))");
-      stmt.execute("with cte1 as (select c1 from " + table1 + " where c1 < 10) " +
-          " select c1 from cte1");
+
+    // This tests CTEs
+    @Test
+    public void cte() throws SQLException {
+        final String table1 = "bigtop_cte_table1";
+        try (Statement stmt = conn.createStatement()) {
+            stmt.execute("drop table if exists " + table1);
+            stmt.execute("create table " + table1 + "(c1 int, c2 varchar(32))");
+            stmt.execute("with cte1 as (select c1 from " + table1 + " where c1 < 10) " +
+                    " select c1 from cte1");
+        }
     }
-  }
 
-  // This tests select, including CTEs, all/distinct, single tables, joins (inner & outer),
-  // group by (w/ and w/o having), order by, cluster by/distribute by/sort by, limit, union,
-  // subqueries, and over.
+    // This tests select, including CTEs, all/distinct, single tables, joins (inner & outer),
+    // group by (w/ and w/o having), order by, cluster by/distribute by/sort by, limit, union,
+    // subqueries, and over.
 
-  @Test
-  public void select() throws SQLException {
-    final String[] tables = {"bigtop_select_table1", "bigtop_select_table2"};
-    try (Statement stmt = conn.createStatement()) {
-      for (int i = 0; i < tables.length; i++) {
-        stmt.execute("drop table if exists " + tables[i]);
-        stmt.execute("create table " + tables[i] + "(c1 int, c2 varchar(32))");
-      }
+    @Test
+    public void select() throws SQLException {
+        final String[] tables = {"bigtop_select_table1", "bigtop_select_table2"};
+        try (Statement stmt = conn.createStatement()) {
+            for (int i = 0; i < tables.length; i++) {
+                stmt.execute("drop table if exists " + tables[i]);
+                stmt.execute("create table " + tables[i] + "(c1 int, c2 varchar(32))");
+            }
 
-      // single table queries tested above in several places
+            // single table queries tested above in several places
 
-      stmt.execute("explain select all a.c2, SUM(a.c1), SUM(b.c1) " +
-          "from " + tables[0] + " a join " + tables[1] + " b on (a.c2 = b.c2) " +
-          "group by a.c2 " +
-          "order by a.c2 asc " +
-          "limit 10");
+            stmt.execute("explain select all a.c2, SUM(a.c1), SUM(b.c1) " +
+                    "from " + tables[0] + " a join " + tables[1] + " b on (a.c2 = b.c2) " +
+                    "group by a.c2 " +
+                    "order by a.c2 asc " +
+                    "limit 10");
 
-      stmt.execute("explain select distinct a.c2 " +
-          "from " + tables[0] + " a left outer join " + tables[1] + " b on (a.c2 = b.c2) " +
-          "order by a.c2 desc ");
+            stmt.execute("explain select distinct a.c2 " +
+                    "from " + tables[0] + " a left outer join " + tables[1] + " b on (a.c2 = b.c2) " +
+                    "order by a.c2 desc ");
 
-      stmt.execute("explain select a.c2, SUM(a.c1) " +
-          "from " + tables[0] + " a right outer join " + tables[1] + " b on (a.c2 = b.c2) " +
-          "group by a.c2 " +
-          "having SUM(b.c1) > 0 " +
-          "order by a.c2 ");
+            stmt.execute("explain select a.c2, SUM(a.c1) " +
+                    "from " + tables[0] + " a right outer join " + tables[1] + " b on (a.c2 = b.c2) " +
+                    "group by a.c2 " +
+                    "having SUM(b.c1) > 0 " +
+                    "order by a.c2 ");
 
-      stmt.execute("explain select a.c2, rank() over (partition by a.c1) " +
-          "from " + tables[0] + " a full outer join " + tables[1] + " b on (a.c2 = b.c2) ");
+            stmt.execute("explain select a.c2, rank() over (partition by a.c1) " +
+                    "from " + tables[0] + " a full outer join " + tables[1] + " b on (a.c2 = b.c2) ");
 
-      stmt.execute("explain select c2 from " + tables[0] + " union all select c2 from " + tables[1]);
+            stmt.execute("explain select c2 from " + tables[0] + " union all select c2 from " + tables[1]);
 
-      stmt.execute("explain select * from " + tables[0] + " distribute by c1 sort by c2");
-      stmt.execute("explain select * from " + tables[0] + " cluster by c1");
+            stmt.execute("explain select * from " + tables[0] + " distribute by c1 sort by c2");
+            stmt.execute("explain select * from " + tables[0] + " cluster by c1");
 
-      stmt.execute("explain select * from (select c1 from " + tables[0] + ") t");
-      stmt.execute("explain select * from " + tables[0] + " where c1 in (select c1 from " + tables[1] +
-          ")");
+            stmt.execute("explain select * from (select c1 from " + tables[0] + ") t");
+            stmt.execute("explain select * from " + tables[0] + " where c1 in (select c1 from " + tables[1] +
+                    ")");
 
-    }
+        }
 
-  }
+    }
 
-  // Update and delete are not tested because not everyone configures their system to run
-  // with ACID.
+    // Update and delete are not tested because not everyone configures their system to run
+    // with ACID.
 
 
 }
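
A minimal, self-contained sketch (not the suite's own connection code) of how a HiveServer2 connection such as the `conn` used by the JDBC tests above can be opened with plain JDBC. The URL, user, and empty password below are placeholder assumptions, and the hive-jdbc driver is assumed to be on the classpath.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class HiveJdbcSketch {
    public static void main(String[] args) throws Exception {
        // HiveServer2 JDBC URL; host, port and database are illustrative values only.
        String url = "jdbc:hive2://localhost:10000/default";
        try (Connection conn = DriverManager.getConnection(url, "hive", "");
             Statement stmt = conn.createStatement()) {
            // Same style as the tests above: a read-only statement, nothing is written.
            stmt.execute("show databases");
        }
    }
}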

http://git-wip-us.apache.org/repos/asf/bigtop/blob/a05d3813/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
index f54b7e5..8139eff 100644
--- a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
@@ -45,207 +45,207 @@ import java.util.Random;
 
 public class TestThrift {
 
-  private static final Log LOG = LogFactory.getLog(TestThrift.class.getName());
+    private static final Log LOG = LogFactory.getLog(TestThrift.class.getName());
+
+    private static IMetaStoreClient client = null;
+    private static HiveConf conf;
+
+    private Random rand;
+
+    @BeforeClass
+    public static void connect() throws MetaException {
+        if (JdbcConnector.testActive(JdbcConnector.TEST_THRIFT, "Test Thrift ")) {
+            String url = JdbcConnector.getProperty(JdbcConnector.METASTORE_URL, "Thrift metastore URL");
+            conf = new HiveConf();
+            conf.setVar(HiveConf.ConfVars.METASTOREURIS, url);
+            LOG.info("Set to test against metastore at " + url);
+            client = new HiveMetaStoreClient(conf);
+        }
+    }
+
+    @Before
+    public void checkIfActive() {
+        Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_THRIFT, "Test Thrift "));
+        rand = new Random();
+    }
+
+    @Test
+    public void db() throws TException {
+        final String dbName = "bigtop_thrift_db_" + rand.nextInt(Integer.MAX_VALUE);
+
+        Database db = new Database(dbName, "a db", null, new HashMap<String, String>());
+        client.createDatabase(db);
+        db = client.getDatabase(dbName);
+        Assert.assertNotNull(db);
+        db = new Database(db);
+        db.getParameters().put("a", "b");
+        client.alterDatabase(dbName, db);
+        List<String> alldbs = client.getDatabases("bigtop_*");
+        Assert.assertNotNull(alldbs);
+        Assert.assertTrue(alldbs.size() > 0);
+        alldbs = client.getAllDatabases();
+        Assert.assertNotNull(alldbs);
+        Assert.assertTrue(alldbs.size() > 0);
+        client.dropDatabase(dbName, true, true);
+    }
+
+    // Not testing types calls, as they aren't used AFAIK
+
+    @Test
+    public void nonPartitionedTable() throws TException {
+        final String tableName = "bigtop_thrift_table_" + rand.nextInt(Integer.MAX_VALUE);
+
+        // I don't test every operation related to tables, but only those that are frequently used.
+        SerDeInfo serde = new SerDeInfo("default_serde",
+                conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
+        FieldSchema fs = new FieldSchema("a", "int", "no comment");
+        StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), null,
+                conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT),
+                conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), false, 0, serde, null, null,
+                new HashMap<String, String>());
+        Table table = new Table(tableName, "default", "me", 0, 0, 0, sd, null,
+                new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+        client.createTable(table);
+
+        table = client.getTable("default", tableName);
+        Assert.assertNotNull(table);
+
+        List<Table> tables =
+                client.getTableObjectsByName("default", Collections.singletonList(tableName));
+        Assert.assertNotNull(tables);
+        Assert.assertEquals(1, tables.size());
+
+        List<String> tableNames = client.getTables("default", "bigtop_*");
+        Assert.assertNotNull(tableNames);
+        Assert.assertTrue(tableNames.size() >= 1);
+
+        tableNames = client.getAllTables("default");
+        Assert.assertNotNull(tableNames);
+        Assert.assertTrue(tableNames.size() >= 1);
 
-  private static IMetaStoreClient client = null;
-  private static HiveConf conf;
+        List<FieldSchema> cols = client.getFields("default", tableName);
+        Assert.assertNotNull(cols);
+        Assert.assertEquals(1, cols.size());
 
-  private Random rand;
+        cols = client.getSchema("default", tableName);
+        Assert.assertNotNull(cols);
+        Assert.assertEquals(1, cols.size());
 
-  @BeforeClass
-  public static void connect() throws MetaException {
-    if (JdbcConnector.testActive(JdbcConnector.TEST_THRIFT, "Test Thrift ")) {
-      String url = JdbcConnector.getProperty(JdbcConnector.METASTORE_URL, "Thrift metastore URL");
-      conf = new HiveConf();
-      conf.setVar(HiveConf.ConfVars.METASTOREURIS, url);
-      LOG.info("Set to test against metastore at " + url);
-      client = new HiveMetaStoreClient(conf);
+        table = new Table(table);
+        table.getParameters().put("a", "b");
+        client.alter_table("default", tableName, table, false);
+
+        table.getParameters().put("c", "d");
+        client.alter_table("default", tableName, table);
+
+        client.dropTable("default", tableName, true, false);
+    }
+
+    @Test
+    public void partitionedTable() throws TException {
+        final String tableName = "bigtop_thrift_partitioned_table_" + rand.nextInt(Integer.MAX_VALUE);
+
+        // I don't test every operation related to tables, but only those that are frequently used.
+        SerDeInfo serde = new SerDeInfo("default_serde",
+                conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
+        FieldSchema fs = new FieldSchema("a", "int", "no comment");
+        StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), null,
+                conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT),
+                conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), false, 0, serde, null, null,
+                new HashMap<String, String>());
+        FieldSchema pk = new FieldSchema("pk", "string", "");
+        Table table = new Table(tableName, "default", "me", 0, 0, 0, sd, Collections.singletonList(pk),
+                new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+        client.createTable(table);
+
+        sd = new StorageDescriptor(Collections.singletonList(fs), null,
+                conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
+                conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
+                new HashMap<String, String>());
+        Partition partition = new Partition(Collections.singletonList("x"), "default", tableName, 0,
+                0, sd, new HashMap<String, String>());
+        client.add_partition(partition);
+
+        List<Partition> partitions = new ArrayList<>(2);
+        sd = new StorageDescriptor(Collections.singletonList(fs), null,
+                conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
+                conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
+                new HashMap<String, String>());
+        partitions.add(new Partition(Collections.singletonList("y"), "default", tableName, 0,
+                0, sd, new HashMap<String, String>()));
+        sd = new StorageDescriptor(Collections.singletonList(fs), null,
+                conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
+                conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
+                new HashMap<String, String>());
+        partitions.add(new Partition(Collections.singletonList("z"), "default", tableName, 0,
+                0, sd, new HashMap<String, String>()));
+        client.add_partitions(partitions);
+
+        List<Partition> parts = client.listPartitions("default", tableName, (short) -1);
+        Assert.assertNotNull(parts);
+        Assert.assertEquals(3, parts.size());
+
+        parts = client.listPartitions("default", tableName, Collections.singletonList("x"),
+                (short) -1);
+        Assert.assertNotNull(parts);
+        Assert.assertEquals(1, parts.size());
+
+        parts = client.listPartitionsWithAuthInfo("default", tableName, (short) -1, "me",
+                Collections.<String>emptyList());
+        Assert.assertNotNull(parts);
+        Assert.assertEquals(3, parts.size());
+
+        List<String> partNames = client.listPartitionNames("default", tableName, (short) -1);
+        Assert.assertNotNull(partNames);
+        Assert.assertEquals(3, partNames.size());
+
+        parts = client.listPartitionsByFilter("default", tableName, "pk = \"x\"", (short) -1);
+        Assert.assertNotNull(parts);
+        Assert.assertEquals(1, parts.size());
+
+        parts = client.getPartitionsByNames("default", tableName, Collections.singletonList("pk=x"));
+        Assert.assertNotNull(parts);
+        Assert.assertEquals(1, parts.size());
+
+        partition = client.getPartition("default", tableName, Collections.singletonList("x"));
+        Assert.assertNotNull(partition);
+
+        partition = client.getPartition("default", tableName, "pk=x");
+        Assert.assertNotNull(partition);
+
+        partition = client.getPartitionWithAuthInfo("default", tableName, Collections.singletonList("x"),
+                "me", Collections.<String>emptyList());
+        Assert.assertNotNull(partition);
+
+        partition = new Partition(partition);
+        partition.getParameters().put("a", "b");
+        client.alter_partition("default", tableName, partition);
+
+        for (Partition p : parts) p.getParameters().put("c", "d");
+        client.alter_partitions("default", tableName, parts);
+
+        // Not testing get_partitions_by_expr because I don't want to hard code some byte sequence
+        // from the parser.  The odds that anyone other than Hive parser would call this method seem
+        // low, since you'd have to exactly match the serialization of the Hive parser.
+
+        // Not testing partition marking events, not used by anyone but Hive replication AFAIK
+
+        client.dropPartition("default", tableName, "pk=x", true);
+        client.dropPartition("default", tableName, Collections.singletonList("y"), true);
     }
-  }
-
-  @Before
-  public void checkIfActive() {
-    Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_THRIFT, "Test Thrift "));
-    rand = new Random();
-  }
-
-  @Test
-  public void db() throws TException {
-    final String dbName = "bigtop_thrift_db_" + rand.nextInt(Integer.MAX_VALUE);
-
-    Database db = new Database(dbName, "a db", null, new HashMap<String, String>());
-    client.createDatabase(db);
-    db = client.getDatabase(dbName);
-    Assert.assertNotNull(db);
-    db = new Database(db);
-    db.getParameters().put("a", "b");
-    client.alterDatabase(dbName, db);
-    List<String> alldbs = client.getDatabases("bigtop_*");
-    Assert.assertNotNull(alldbs);
-    Assert.assertTrue(alldbs.size() > 0);
-    alldbs = client.getAllDatabases();
-    Assert.assertNotNull(alldbs);
-    Assert.assertTrue(alldbs.size() > 0);
-    client.dropDatabase(dbName, true, true);
-  }
-
-  // Not testing types calls, as they aren't used AFAIK
-
-  @Test
-  public void nonPartitionedTable() throws TException {
-    final String tableName = "bigtop_thrift_table_" + rand.nextInt(Integer.MAX_VALUE);
-
-    // I don't test every operation related to tables, but only those that are frequently used.
-    SerDeInfo serde = new SerDeInfo("default_serde",
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
-    FieldSchema fs = new FieldSchema("a", "int", "no comment");
-    StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), null,
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT),
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), false, 0, serde, null, null,
-        new HashMap<String, String>());
-    Table table = new Table(tableName, "default", "me", 0, 0, 0, sd, null,
-        new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
-    client.createTable(table);
-
-    table = client.getTable("default", tableName);
-    Assert.assertNotNull(table);
-
-    List<Table> tables =
-        client.getTableObjectsByName("default", Collections.singletonList(tableName));
-    Assert.assertNotNull(tables);
-    Assert.assertEquals(1, tables.size());
-
-    List<String> tableNames = client.getTables("default", "bigtop_*");
-    Assert.assertNotNull(tableNames);
-    Assert.assertTrue(tableNames.size() >= 1);
-
-    tableNames = client.getAllTables("default");
-    Assert.assertNotNull(tableNames);
-    Assert.assertTrue(tableNames.size() >= 1);
-
-    List<FieldSchema> cols = client.getFields("default", tableName);
-    Assert.assertNotNull(cols);
-    Assert.assertEquals(1, cols.size());
-
-    cols = client.getSchema("default", tableName);
-    Assert.assertNotNull(cols);
-    Assert.assertEquals(1, cols.size());
-
-    table = new Table(table);
-    table.getParameters().put("a", "b");
-    client.alter_table("default", tableName, table, false);
-
-    table.getParameters().put("c", "d");
-    client.alter_table("default", tableName, table);
-
-    client.dropTable("default", tableName, true, false);
-  }
-
-  @Test
-  public void partitionedTable() throws TException {
-    final String tableName = "bigtop_thrift_partitioned_table_" + rand.nextInt(Integer.MAX_VALUE);
-
-    // I don't test every operation related to tables, but only those that are frequently used.
-    SerDeInfo serde = new SerDeInfo("default_serde",
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
-    FieldSchema fs = new FieldSchema("a", "int", "no comment");
-    StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), null,
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT),
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), false, 0, serde, null, null,
-        new HashMap<String, String>());
-    FieldSchema pk = new FieldSchema("pk", "string", "");
-    Table table = new Table(tableName, "default", "me", 0, 0, 0, sd, Collections.singletonList(pk),
-        new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
-    client.createTable(table);
-
-    sd = new StorageDescriptor(Collections.singletonList(fs), null,
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
-        new HashMap<String, String>());
-    Partition partition = new Partition(Collections.singletonList("x"), "default", tableName, 0,
-        0, sd, new HashMap<String, String>());
-    client.add_partition(partition);
-
-    List<Partition> partitions = new ArrayList<>(2);
-    sd = new StorageDescriptor(Collections.singletonList(fs), null,
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
-        new HashMap<String, String>());
-    partitions.add(new Partition(Collections.singletonList("y"), "default", tableName, 0,
-        0, sd, new HashMap<String, String>()));
-    sd = new StorageDescriptor(Collections.singletonList(fs), null,
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
-        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
-        new HashMap<String, String>());
-    partitions.add(new Partition(Collections.singletonList("z"), "default", tableName, 0,
-        0, sd, new HashMap<String, String>()));
-    client.add_partitions(partitions);
-
-    List<Partition> parts = client.listPartitions("default", tableName, (short)-1);
-    Assert.assertNotNull(parts);
-    Assert.assertEquals(3, parts.size());
-
-    parts = client.listPartitions("default", tableName, Collections.singletonList("x"),
-        (short)-1);
-    Assert.assertNotNull(parts);
-    Assert.assertEquals(1, parts.size());
-
-    parts = client.listPartitionsWithAuthInfo("default", tableName, (short)-1, "me",
-        Collections.<String>emptyList());
-    Assert.assertNotNull(parts);
-    Assert.assertEquals(3, parts.size());
-
-    List<String> partNames = client.listPartitionNames("default", tableName, (short)-1);
-    Assert.assertNotNull(partNames);
-    Assert.assertEquals(3, partNames.size());
-
-    parts = client.listPartitionsByFilter("default", tableName, "pk = \"x\"", (short)-1);
-    Assert.assertNotNull(parts);
-    Assert.assertEquals(1, parts.size());
-
-    parts = client.getPartitionsByNames("default", tableName, Collections.singletonList("pk=x"));
-    Assert.assertNotNull(parts);
-    Assert.assertEquals(1, parts.size());
-
-    partition = client.getPartition("default", tableName, Collections.singletonList("x"));
-    Assert.assertNotNull(partition);
-
-    partition = client.getPartition("default", tableName, "pk=x");
-    Assert.assertNotNull(partition);
-
-    partition = client.getPartitionWithAuthInfo("default", tableName, Collections.singletonList("x"),
-        "me", Collections.<String>emptyList());
-    Assert.assertNotNull(partition);
-
-    partition = new Partition(partition);
-    partition.getParameters().put("a", "b");
-    client.alter_partition("default", tableName, partition);
-
-    for (Partition p : parts) p.getParameters().put("c", "d");
-    client.alter_partitions("default", tableName, parts);
-
-    // Not testing get_partitions_by_expr because I don't want to hard code some byte sequence
-    // from the parser.  The odds that anyone other than Hive parser would call this method seem
-  // low, since you'd have to exactly match the serialization of the Hive parser.
-
-    // Not testing partition marking events, not used by anyone but Hive replication AFAIK
-
-    client.dropPartition("default", tableName, "pk=x", true);
-    client.dropPartition("default", tableName, Collections.singletonList("y"), true);
-  }
-
-  // Not testing index calls, as no one uses indices
-
-
-  // Not sure if anyone uses stats calls or not.  Other query engines might.  Ignoring for now.
-
-  // Not sure if anyone else uses functions, though I'm guessing not as without Hive classes they
-  // won't be runnable.
-
-  // Not testing authorization calls as AFAIK no one else uses Hive security
-
-  // Not testing transaction/locking calls, as those are used only by Hive.
-
-  // Not testing notification logging calls, as those are used only by Hive replication.
+
+    // Not testing index calls, as no one uses indices
+
+
+    // Not sure if anyone uses stats calls or not.  Other query engines might.  Ignoring for now.
+
+    // Not sure if anyone else uses functions, though I'm guessing not as without Hive classes they
+    // won't be runnable.
+
+    // Not testing authorization calls as AFAIK no one else uses Hive security
+
+    // Not testing transaction/locking calls, as those are used only by Hive.
+
+    // Not testing notification logging calls, as those are used only by Hive replication.
 
 }
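
A standalone sketch of the Thrift metastore handshake that TestThrift exercises: point HiveConf at a metastore URI and ask for the database list. The thrift://localhost:9083 URI is an assumed example value; the test itself reads the URL from a property via JdbcConnector.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;

public class MetastoreClientSketch {
    public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        // Same setting the test uses; the URI here is only an example.
        conf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:9083");
        IMetaStoreClient client = new HiveMetaStoreClient(conf);
        try {
            System.out.println("databases: " + client.getAllDatabases());
        } finally {
            client.close();
        }
    }
}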


[11/50] [abbrv] bigtop git commit: Made changes to work with our reference implementation.

Posted by rv...@apache.org.
Made changes to work with our reference implementation.

(cherry picked from commit 78c594c387410291fb68b835113d1e30124e73ad)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/166e9f78
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/166e9f78
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/166e9f78

Branch: refs/heads/master
Commit: 166e9f78fc813e015a15710de60d95cb51489962
Parents: ccbdab4
Author: Raj Desai <rd...@us.ibm.com>
Authored: Tue Nov 1 16:17:57 2016 -0700
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:11 2017 -0700

----------------------------------------------------------------------
 .../org/odpi/specs/runtime/hive/TestCLI.java    | 88 ++++++++++----------
 1 file changed, 44 insertions(+), 44 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/166e9f78/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
index 18ee81d..2b70909 100644
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
+++ b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestCLI.java
@@ -30,13 +30,13 @@ import org.junit.Assert;
 public class TestCLI {
 	
 	static Map<String, String> results;
+	static String db = "javax.jdo.option.ConnectionURL=jdbc:derby:;databaseName=odpi_metastore_db;create=true";
 	
 	@BeforeClass
 	public static void setup(){
 		
 		results = HiveHelper.execCommand(new CommandLine("which").addArgument("hive"));
 		Assert.assertEquals("Hive is not in the current path.", 0, Integer.parseInt(results.get("exitValue")));
-
 	}
 	
 	@Test
@@ -55,17 +55,17 @@ public class TestCLI {
 	@Test
 	public void sqlFromCmdLine(){
 		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
 		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
 		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive"));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
 			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
 		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive"));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
 			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
 		}
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
 	}
 	
 	@Test
@@ -74,33 +74,33 @@ public class TestCLI {
 		try(PrintWriter out = new PrintWriter("hive-f2.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
 		try(PrintWriter out = new PrintWriter("hive-f3.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
 		try(PrintWriter out = new PrintWriter("hive-f4.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); }
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f1.sql"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f1.sql").addArgument("--hiveconf").addArgument(db));
 		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
 		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f2.sql"));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f2.sql").addArgument("--hiveconf").addArgument(db));
 			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
 		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f3.sql"));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f3.sql").addArgument("--hiveconf").addArgument(db));
 			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
 		}
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f4.sql"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-f").addArgument("hive-f4.sql").addArgument("--hiveconf").addArgument(db));
 	}
 	
 	@Test
 	public void silent() {
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-S"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-S").addArgument("--hiveconf").addArgument(db));
 		Assert.assertEquals("-S option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
 		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--silent"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--silent").addArgument("--hiveconf").addArgument(db));
 		Assert.assertEquals("--silent option did not work.", new Boolean(false), results.get("outputStream").contains("Time taken:"));
 	}
 	
 	@Test
 	public void verbose(){
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-v"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("-v").addArgument("--hiveconf").addArgument(db));
 		Assert.assertEquals("-v option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));
 		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--verbose"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--verbose").addArgument("--hiveconf").addArgument(db));
 		Assert.assertEquals("--verbose option did not work.", new Boolean(true), results.get("outputStream").contains("SHOW DATABASES"));		
 	}
 	
@@ -109,100 +109,100 @@ public class TestCLI {
 		try(PrintWriter out = new PrintWriter("hive-init1.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
 		try(PrintWriter out = new PrintWriter("hive-init2.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
 		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
 		Assert.assertEquals("SHOW DATABASES command failed to execute.", 0, Integer.parseInt(results.get("exitValue")));
 		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init1.sql").addArgument("-e").addArgument("SHOW DATABASES"));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init1.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
 			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", 0, Integer.parseInt(results.get("exitValue")));
 			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", true, results.get("outputStream").contains("odpi_runtime_hive"));
 		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init2.sql").addArgument("-e").addArgument("SHOW DATABASES"));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-i").addArgument("hive-init2.sql").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
 			Assert.assertEquals("Could not create database odpi_runtime_hive.", 0, Integer.parseInt(results.get("exitValue")));
 			Assert.assertEquals("Could not create database odpi_runtime_hive using the init -i option.", true, results.get("outputStream").contains("odpi_runtime_hive"));
 		}
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
 	}
 	
 	@Test
 	public void database(){
 		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
 		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive"));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
 		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive"));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
 		}
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive_1234").addArgument("-e").addArgument("CREATE TABLE odpi ( MYID INT );"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive_1234").addArgument("-e").addArgument("CREATE TABLE odpi ( MYID INT );").addArgument("--hiveconf").addArgument(db));
 		Assert.assertEquals("Non-existent database returned with wrong exit code: "+Integer.parseInt(results.get("exitValue")), 88, Integer.parseInt(results.get("exitValue")));
 		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("CREATE TABLE odpi ( MYID INT );"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("CREATE TABLE odpi ( MYID INT );").addArgument("--hiveconf").addArgument(db));
 		Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
 		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("DESCRIBE odpi"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("DESCRIBE odpi").addArgument("--hiveconf").addArgument(db));
 		Assert.assertEquals("Failed to get expected column after creating odpi table using --database argument.", true, results.get("outputStream").contains("myid"));
 		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("DROP TABLE odpi"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--database").addArgument("odpi_runtime_hive").addArgument("-e").addArgument("DROP TABLE odpi").addArgument("--hiveconf").addArgument(db));
 		Assert.assertEquals("Failed to create table using --database argument.", 0, Integer.parseInt(results.get("exitValue")));
 		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
 	}
 	
 	@Test
 	public void hiveConf(){
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("--hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
 		Assert.assertEquals("The --hiveconf option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
 		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-hiveconf").addArgument("hive.root.logger=INFO,console").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
 		Assert.assertEquals("The -hiveconf variant option did not work in setting hive.root.logger=INFO,console.", true, results.get("outputStream").contains("INFO parse.ParseDriver: Parsing command: SHOW DATABASES"));
 	}
 	
 	@Test
 	public void variableSubsitution() throws FileNotFoundException{
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
 		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive"));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
 		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive"));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
 		}
 		try(PrintWriter out = new PrintWriter("hive-define.sql")){ out.println("show ${A};"); out.println("quit;"); }
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive -d A=DATABASES < hive-define.sql", false));		
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive -d A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false));		
 		Assert.assertEquals("The hive -d A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
 		Assert.assertEquals("The hive -d A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
 		
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --define A=DATABASES < hive-define.sql", false));		
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --define A=DATABASES --hiveconf '"+db+"' < hive-define.sql", false));		
 		Assert.assertEquals("The hive --define A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
 		Assert.assertEquals("The hive --define A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
 		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
 	}
 	
 	@Test
 	public void hiveVar() throws FileNotFoundException{
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("SHOW DATABASES").addArgument("--hiveconf").addArgument(db));
 		if(!results.get("outputStream").contains("odpi_runtime_hive")){
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive"));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
 		}else{
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
-			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive"));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
+			results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
 		}
 		try(PrintWriter out = new PrintWriter("hive-var.sql")){ out.println("show ${A};"); out.println("quit;"); }
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hivevar A=DATABASES < hive-var.sql", false));		
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hivevar A=DATABASES --hiveconf '"+db+"' < hive-var.sql", false));		
 		Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
 		Assert.assertEquals("The hive --hivevar A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
 		
 		try(PrintWriter out = new PrintWriter("hiveconf-var.sql")){ out.println("show ${hiveconf:A};"); out.println("quit;"); }
-		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hiveconf A=DATABASES < hiveconf-var.sql", false));		
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("hive --hiveconf A=DATABASES --hiveconf '"+db+"' < hiveconf-var.sql", false));		
 		Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", 0, Integer.parseInt(results.get("exitValue")));
 		Assert.assertEquals("The hive --hiveconf A=DATABASES option did not work.", true, results.get("outputStream").contains("odpi_runtime_hive"));
 		
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
 	}
 	
 	@AfterClass
 	public static void cleanup(){
-		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+		results = HiveHelper.execCommand(new CommandLine("hive").addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive").addArgument("--hiveconf").addArgument(db));
 		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-f*.sql", false));
 		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-init*.sql", false));
 		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf hive-define.sql", false));


[43/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-bin.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-bin.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-bin.list
new file mode 100644
index 0000000..ab6cd51
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-bin.list
@@ -0,0 +1,2 @@
+rcc
+hadoop

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-jar.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-jar.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-jar.list
new file mode 100644
index 0000000..2edbd0f
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common-jar.list
@@ -0,0 +1,60 @@
+api-util-1\.0\.0-M20[\.\-_].*jar
+curator-recipes-2\.7\.1[\.\-_].*jar
+curator-framework-2\.7\.1[\.\-_].*jar
+netty-3\.6\.2\.Final[\.\-_].*jar
+gson-2\.2\.4[\.\-_].*jar
+paranamer-2\.3[\.\-_].*jar
+jackson-core-asl-1\.9\.13[\.\-_].*jar
+jackson-xc-1\.9\.13[\.\-_].*jar
+jersey-server-1\.9[\.\-_].*jar
+stax-api-1\.0-2[\.\-_].*jar
+zookeeper-3\.4\.6[\.\-_].*jar
+htrace-core-3\.1\.0-incubating[\.\-_].*jar
+slf4j-api-1\.7\.10[\.\-_].*jar
+avro-1\.7\.[4-7][\.\-_].*jar
+slf4j-log4j12-1\.7\.10[\.\-_].*jar
+curator-client-2\.7\.1[\.\-_].*jar
+jets3t-0\.9\.0[\.\-_].*jar
+commons-net-3\.1[\.\-_].*jar
+jaxb-impl-2\.2\.3-1[\.\-_].*jar
+httpclient-4\.[0-9]\.[0-9][\.\-_].*jar
+apacheds-kerberos-codec-2\.0\.0-M15[\.\-_].*jar
+commons-cli-1\.2[\.\-_].*jar
+log4j-1\.2\.17[\.\-_].*jar
+jackson-mapper-asl-1\.9\.13[\.\-_].*jar
+java-xmlbuilder-0\.4[\.\-_].*jar
+jsp-api-2\.1[\.\-_].*jar
+guava-11\.0\.2[\.\-_].*jar
+jetty-6\.1\.26[\.\-_].*jar
+commons-logging-1\.1\.3[\.\-_].*jar
+snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar
+commons-httpclient-3\.1[\.\-_].*jar
+jsch-0\.1\.(4[2-9]|[5-9]\d)[\.\-_].*jar
+jersey-core-1\.9[\.\-_].*jar
+commons-compress-1\.4\.1[\.\-_].*jar
+jettison-1\.1[\.\-_].*jar
+junit-4\.11[\.\-_].*jar
+commons-collections-3\.2\.[12][\.\-_].*jar
+xz-1\.0[\.\-_].*jar
+asm-3\.2[\.\-_].*jar
+commons-codec-1\.4[\.\-_].*jar
+commons-digester-1\.8[\.\-_].*jar
+api-asn1-api-1\.0\.0-M20[\.\-_].*jar
+xmlenc-0\.52[\.\-_].*jar
+commons-configuration-1\.6[\.\-_].*jar
+mockito-all-1\.8\.5[\.\-_].*jar
+commons-lang-2\.6[\.\-_].*jar
+jetty-util-6\.1\.26[\.\-_].*jar
+jsr305-3\.0\.0[\.\-_].*jar
+protobuf-java-2\.5\.0[\.\-_].*jar
+httpcore-4\.[0-9]\.[0-9][\.\-_].*jar
+commons-io-2\.4[\.\-_].*jar
+activation-1\.1[\.\-_].*jar
+jersey-json-1\.9[\.\-_].*jar
+jaxb-api-2\.2\.2[\.\-_].*jar
+commons-math3-3\.1\.1[\.\-_].*jar
+hamcrest-core-1\.3[\.\-_].*jar
+commons-beanutils(-core)?-1\.[78]\.0[\.\-_].*jar
+apacheds-i18n-2\.0\.0-M15[\.\-_].*jar
+servlet-api-2\.5[\.\-_].*jar
+jackson-jaxrs-1\.9\.13[\.\-_].*jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common.list
new file mode 100644
index 0000000..73ff182
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-common.list
@@ -0,0 +1,230 @@
+bin
+bin/rcc
+bin/hadoop
+sbin
+sbin/hadoop-daemons\.sh
+sbin/hadoop-daemon\.sh
+sbin/slaves\.sh
+hadoop-annotations-2\.7\.[0-9][\.\-_].*jar
+hadoop-common-2\.7\.[0-9][\.\-_].*jar
+hadoop-annotations[\.\-_].*jar
+hadoop-common-2\.7\.[0-9].*-tests\.jar
+etc
+etc/hadoop
+hadoop-common[\.\-_].*jar
+hadoop-auth-2\.7\.[0-9][\.\-_].*jar
+libexec
+libexec/hdfs-config\.sh
+libexec/hadoop-layout\.sh
+libexec/yarn-config\.sh
+libexec/mapred-config\.sh
+libexec/hadoop-config\.sh
+libexec/init-hdfs\.sh
+hadoop-auth[\.\-_].*jar
+hadoop-nfs[\.\-_].*jar
+hadoop-nfs-2\.7\.[0-9][\.\-_].*jar
+client
+client/curator-recipes[\.\-_].*jar
+client/curator-recipes-2\.7\.1[\.\-_].*jar
+client/commons-configuration[\.\-_].*jar
+client/jsr305[\.\-_].*jar
+client/slf4j-log4j12[\.\-_].*jar
+client/hadoop-mapreduce-client-core[\.\-_].*jar
+client/hadoop-hdfs[\.\-_].*jar
+client/commons-configuration-1\.6[\.\-_].*jar
+client/commons-cli-1\.2[\.\-_].*jar
+client/hadoop-mapreduce-client-common-2\.7\.[0-9][\.\-_].*jar
+client/commons-digester-1\.8[\.\-_].*jar
+client/curator-client-2\.7\.1[\.\-_].*jar
+client/httpclient[\.\-_].*jar
+client/commons-beanutils(-core)?-1\.[78]\.0[\.\-_].*jar
+client/jsp-api-2\.1[\.\-_].*jar
+client/leveldbjni-all-1\.8[\.\-_].*jar
+client/slf4j-api-1\.7\.10[\.\-_].*jar
+client/hadoop-annotations-2\.7\.[0-9][\.\-_].*jar
+client/jersey-core[\.\-_].*jar
+client/commons-compress[\.\-_].*jar
+client/stax-api[\.\-_].*jar
+client/jaxb-api-2\.2\.2[\.\-_].*jar
+client/api-util-1\.0\.0-M20[\.\-_].*jar
+client/jackson-xc[\.\-_].*jar
+client/commons-cli[\.\-_].*jar
+client/xml-apis[\.\-_].*jar
+client/curator-client[\.\-_].*jar
+client/curator-framework-2\.7\.1[\.\-_].*jar
+client/commons-io-2\.4[\.\-_].*jar
+client/jackson-core-asl[\.\-_].*jar
+client/avro[\.\-_].*jar
+client/hadoop-mapreduce-client-app[\.\-_].*jar
+client/jetty-util[\.\-_].*jar
+client/guava[\.\-_].*jar
+client/commons-beanutils[\.\-_].*jar
+client/apacheds-i18n[\.\-_].*jar
+client/jetty-util-6\.1\.26[\.\-_].*jar
+client/xercesImpl-2\.9\.1[\.\-_].*jar
+client/commons-logging[\.\-_].*jar
+client/slf4j-api[\.\-_].*jar
+client/commons-digester[\.\-_].*jar
+client/avro-1\.7\.[4-7][\.\-_].*jar
+client/hadoop-common-2\.7\.[0-9][\.\-_].*jar
+client/commons-math3[\.\-_].*jar
+client/hadoop-yarn-common-2\.7\.[0-9][\.\-_].*jar
+client/hadoop-annotations[\.\-_].*jar
+client/xercesImpl[\.\-_].*jar
+client/commons-codec[\.\-_].*jar
+client/netty-3\.6\.2\.Final[\.\-_].*jar
+client/commons-collections[\.\-_].*jar
+client/httpcore-4\.[0-9]\.[0-9][\.\-_].*jar
+client/hadoop-mapreduce-client-jobclient[\.\-_].*jar
+client/htrace-core[\.\-_].*jar
+client/jersey-core-1\.9[\.\-_].*jar
+client/xz[\.\-_].*jar
+client/jackson-mapper-asl-1\.9\.13[\.\-_].*jar
+client/jsp-api[\.\-_].*jar
+client/commons-httpclient[\.\-_].*jar
+client/netty[\.\-_].*jar
+client/hadoop-mapreduce-client-shuffle-2\.7\.[0-9][\.\-_].*jar
+client/commons-net[\.\-_].*jar
+client/hadoop-yarn-server-common[\.\-_].*jar
+client/jaxb-api[\.\-_].*jar
+client/apacheds-kerberos-codec[\.\-_].*jar
+client/httpcore[\.\-_].*jar
+client/hadoop-yarn-server-common-2\.7\.[0-9][\.\-_].*jar
+client/hadoop-common[\.\-_].*jar
+client/leveldbjni-all[\.\-_].*jar
+client/snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar
+client/gson-2\.2\.4[\.\-_].*jar
+client/commons-net-3\.1[\.\-_].*jar
+client/api-util[\.\-_].*jar
+client/commons-compress-1\.4\.1[\.\-_].*jar
+client/jackson-xc-1\.9\.13[\.\-_].*jar
+client/netty-all-4\.0\.23\.Final[\.\-_].*jar
+client/xmlenc-0\.52[\.\-_].*jar
+client/jackson-jaxrs[\.\-_].*jar
+client/api-asn1-api[\.\-_].*jar
+client/api-asn1-api-1\.0\.0-M20[\.\-_].*jar
+client/commons-codec-1\.4[\.\-_].*jar
+client/jackson-core-asl-1\.9\.13[\.\-_].*jar
+client/servlet-api-2\.5[\.\-_].*jar
+client/commons-beanutils(-core)?[\.\-_].*jar
+client/paranamer-2\.3[\.\-_].*jar
+client/hadoop-yarn-api-2\.7\.[0-9][\.\-_].*jar
+client/hadoop-mapreduce-client-shuffle[\.\-_].*jar
+client/apacheds-i18n-2\.0\.0-M15[\.\-_].*jar
+client/hadoop-yarn-common[\.\-_].*jar
+client/hadoop-auth-2\.7\.[0-9][\.\-_].*jar
+client/snappy-java[\.\-_].*jar
+client/gson[\.\-_].*jar
+client/xml-apis-1\.3\.04[\.\-_].*jar
+client/commons-io[\.\-_].*jar
+client/commons-math3-3\.1\.1[\.\-_].*jar
+client/log4j[\.\-_].*jar
+client/hadoop-auth[\.\-_].*jar
+client/log4j-1\.2\.17[\.\-_].*jar
+client/servlet-api[\.\-_].*jar
+client/hadoop-hdfs-2\.7\.[0-9][\.\-_].*jar
+client/activation[\.\-_].*jar
+client/zookeeper[\.\-_].*jar
+client/xmlenc[\.\-_].*jar
+client/stax-api-1\.0-2[\.\-_].*jar
+client/hadoop-yarn-client-2\.7\.[0-9][\.\-_].*jar
+client/jersey-client-1\.9[\.\-_].*jar
+client/hadoop-mapreduce-client-common[\.\-_].*jar
+client/xz-1\.0[\.\-_].*jar
+client/zookeeper-3\.4\.6[\.\-_].*jar
+client/activation-1\.1[\.\-_].*jar
+client/hadoop-mapreduce-client-jobclient-2\.7\.[0-9][\.\-_].*jar
+client/htrace-core-3\.1\.0-incubating[\.\-_].*jar
+client/protobuf-java-2\.5\.0[\.\-_].*jar
+client/hadoop-mapreduce-client-app-2\.7\.[0-9][\.\-_].*jar
+client/apacheds-kerberos-codec-2\.0\.0-M15[\.\-_].*jar
+client/commons-lang[\.\-_].*jar
+client/httpclient-4\.[0-9]\.[0-9][\.\-_].*jar
+client/paranamer[\.\-_].*jar
+client/hadoop-yarn-api[\.\-_].*jar
+client/jersey-client[\.\-_].*jar
+client/hadoop-mapreduce-client-core-2\.7\.[0-9][\.\-_].*jar
+client/curator-framework[\.\-_].*jar
+client/guava-11\.0\.2[\.\-_].*jar
+client/jsr305-3\.0\.0[\.\-_].*jar
+client/hadoop-yarn-client[\.\-_].*jar
+client/jackson-jaxrs-1\.9\.13[\.\-_].*jar
+client/commons-httpclient-3\.1[\.\-_].*jar
+client/commons-collections-3\.2\.[12][\.\-_].*jar
+client/netty-all[\.\-_].*jar
+client/slf4j-log4j12-1\.7\.10[\.\-_].*jar
+client/protobuf-java[\.\-_].*jar
+client/jackson-mapper-asl[\.\-_].*jar
+client/commons-logging-1\.1\.3[\.\-_].*jar
+client/commons-lang-2\.6[\.\-_].*jar
+lib
+lib/curator-recipes-2\.7\.1[\.\-_].*jar
+lib/commons-configuration-1\.6[\.\-_].*jar
+lib/commons-cli-1\.2[\.\-_].*jar
+lib/commons-digester-1\.8[\.\-_].*jar
+lib/curator-client-2\.7\.1[\.\-_].*jar
+lib/commons-beanutils(-core)?-1\.[78]\.0[\.\-_].*jar
+lib/jsp-api-2\.1[\.\-_].*jar
+lib/jets3t-0\.9\.0[\.\-_].*jar
+lib/slf4j-api-1\.7\.10[\.\-_].*jar
+lib/jaxb-api-2\.2\.2[\.\-_].*jar
+lib/api-util-1\.0\.0-M20[\.\-_].*jar
+lib/jettison-1\.1[\.\-_].*jar
+lib/curator-framework-2\.7\.1[\.\-_].*jar
+lib/commons-io-2\.4[\.\-_].*jar
+lib/jetty-util-6\.1\.26[\.\-_].*jar
+lib/avro-1\.7\.[4-7][\.\-_].*jar
+lib/jaxb-impl-2\.2\.3-1[\.\-_].*jar
+lib/netty-3\.6\.2\.Final[\.\-_].*jar
+lib/httpcore-4\.[0-9]\.[0-9][\.\-_].*jar
+lib/jsch-0\.1\.(4[2-9]|[5-9]\d)[\.\-_].*jar
+lib/jersey-core-1\.9[\.\-_].*jar
+lib/jackson-mapper-asl-1\.9\.13[\.\-_].*jar
+lib/snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar
+lib/gson-2\.2\.4[\.\-_].*jar
+lib/commons-net-3\.1[\.\-_].*jar
+lib/asm-3\.2[\.\-_].*jar
+lib/commons-compress-1\.4\.1[\.\-_].*jar
+lib/mockito-all-1\.8\.5[\.\-_].*jar
+lib/jackson-xc-1\.9\.13[\.\-_].*jar
+lib/junit-4\.11[\.\-_].*jar
+lib/jersey-json-1\.9[\.\-_].*jar
+lib/xmlenc-0\.52[\.\-_].*jar
+lib/api-asn1-api-1\.0\.0-M20[\.\-_].*jar
+lib/commons-codec-1\.4[\.\-_].*jar
+lib/jackson-core-asl-1\.9\.13[\.\-_].*jar
+lib/servlet-api-2\.5[\.\-_].*jar
+lib/paranamer-2\.3[\.\-_].*jar
+lib/native
+lib/native/libhadoop\.a
+lib/native/libhadoop\.so
+lib/native/libhdfs\.a
+lib/native/libsnappy\.so[.0-9]*
+lib/native/libsnappy\.so
+lib/native/libhadoop\.so[.0-9]*
+lib/native/libhadooputils\.a
+lib/native/libsnappy\.so[.0-9]*
+lib/native/libhadooppipes\.a
+lib/jetty-6\.1\.26[\.\-_].*jar
+lib/jersey-server-1\.9[\.\-_].*jar
+lib/apacheds-i18n-2\.0\.0-M15[\.\-_].*jar
+lib/commons-math3-3\.1\.1[\.\-_].*jar
+lib/log4j-1\.2\.17[\.\-_].*jar
+lib/hamcrest-core-1\.3[\.\-_].*jar
+lib/stax-api-1\.0-2[\.\-_].*jar
+lib/xz-1\.0[\.\-_].*jar
+lib/zookeeper-3\.4\.6[\.\-_].*jar
+lib/activation-1\.1[\.\-_].*jar
+lib/htrace-core-3\.1\.0-incubating[\.\-_].*jar
+lib/protobuf-java-2\.5\.0[\.\-_].*jar
+lib/apacheds-kerberos-codec-2\.0\.0-M15[\.\-_].*jar
+lib/java-xmlbuilder-0\.4[\.\-_].*jar
+lib/httpclient-4\.[0-9]\.[0-9][\.\-_].*jar
+lib/guava-11\.0\.2[\.\-_].*jar
+lib/jsr305-3\.0\.0[\.\-_].*jar
+lib/jackson-jaxrs-1\.9\.13[\.\-_].*jar
+lib/commons-httpclient-3\.1[\.\-_].*jar
+lib/commons-collections-3\.2\.[12][\.\-_].*jar
+lib/slf4j-log4j12-1\.7\.10[\.\-_].*jar
+lib/commons-logging-1\.1\.3[\.\-_].*jar
+lib/commons-lang-2\.6[\.\-_].*jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-2.7.3-api-report.json
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-2.7.3-api-report.json b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-2.7.3-api-report.json
new file mode 100644
index 0000000..b5e2265
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-2.7.3-api-report.json
@@ -0,0 +1 @@
+{"name":"hadoop-hdfs","version":"2.7.3","classes":{"org.apache.hadoop.hdfs.server.namenode.NameNodeMXBean":{"name":"org.apache.hadoop.hdfs.server.namenode.NameNodeMXBean","methods":{"long getTotal()":{"name":"getTotal","returnType":"long","args":[],"exceptions":[]},"java.lang.String getDeadNodes()":{"name":"getDeadNodes","returnType":"java.lang.String","args":[],"exceptions":[]},"int getDistinctVersionCount()":{"name":"getDistinctVersionCount","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.hdfs.protocol.RollingUpgradeInfo$Bean getRollingUpgradeStatus()":{"name":"getRollingUpgradeStatus","returnType":"org.apache.hadoop.hdfs.protocol.RollingUpgradeInfo$Bean","args":[],"exceptions":[]},"java.lang.String getVersion()":{"name":"getVersion","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.Map getDistinctVersions()":{"name":"getDistinctVersions","returnType":"java.util.Map","args":[],"exceptions":[]},"int getThreads()":{"name":"getThreads","returnType
 ":"int","args":[],"exceptions":[]},"java.lang.String getJournalTransactionInfo()":{"name":"getJournalTransactionInfo","returnType":"java.lang.String","args":[],"exceptions":[]},"float getPercentBlockPoolUsed()":{"name":"getPercentBlockPoolUsed","returnType":"float","args":[],"exceptions":[]},"java.lang.String getClusterId()":{"name":"getClusterId","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getLiveNodes()":{"name":"getLiveNodes","returnType":"java.lang.String","args":[],"exceptions":[]},"long getBlockPoolUsedSpace()":{"name":"getBlockPoolUsedSpace","returnType":"long","args":[],"exceptions":[]},"java.lang.String getSafemode()":{"name":"getSafemode","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getCorruptFiles()":{"name":"getCorruptFiles","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getSoftwareVersion()":{"name":"getSoftwareVersion","returnType":"java.lang.String","args":[],"exceptions":[]
 },"long getTotalFiles()":{"name":"getTotalFiles","returnType":"long","args":[],"exceptions":[]},"long getCacheUsed()":{"name":"getCacheUsed","returnType":"long","args":[],"exceptions":[]},"java.lang.String getNameDirStatuses()":{"name":"getNameDirStatuses","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getCompileInfo()":{"name":"getCompileInfo","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getNodeUsage()":{"name":"getNodeUsage","returnType":"java.lang.String","args":[],"exceptions":[]},"long getNumberOfMissingBlocksWithReplicationFactorOne()":{"name":"getNumberOfMissingBlocksWithReplicationFactorOne","returnType":"long","args":[],"exceptions":[]},"java.lang.String getNameJournalStatus()":{"name":"getNameJournalStatus","returnType":"java.lang.String","args":[],"exceptions":[]},"long getNonDfsUsedSpace()":{"name":"getNonDfsUsedSpace","returnType":"long","args":[],"exceptions":[]},"java.lang.String getNNStarted()":{"name":"ge
 tNNStarted","returnType":"java.lang.String","args":[],"exceptions":[]},"float getPercentRemaining()":{"name":"getPercentRemaining","returnType":"float","args":[],"exceptions":[]},"boolean isUpgradeFinalized()":{"name":"isUpgradeFinalized","returnType":"boolean","args":[],"exceptions":[]},"long getTotalBlocks()":{"name":"getTotalBlocks","returnType":"long","args":[],"exceptions":[]},"java.lang.String getBlockPoolId()":{"name":"getBlockPoolId","returnType":"java.lang.String","args":[],"exceptions":[]},"long getUsed()":{"name":"getUsed","returnType":"long","args":[],"exceptions":[]},"long getNumberOfMissingBlocks()":{"name":"getNumberOfMissingBlocks","returnType":"long","args":[],"exceptions":[]},"java.lang.String getDecomNodes()":{"name":"getDecomNodes","returnType":"java.lang.String","args":[],"exceptions":[]},"long getFree()":{"name":"getFree","returnType":"long","args":[],"exceptions":[]},"float getPercentUsed()":{"name":"getPercentUsed","returnType":"float","args":[],"exceptions":
 []},"long getCacheCapacity()":{"name":"getCacheCapacity","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.hdfs.server.datanode.DataNodeMXBean":{"name":"org.apache.hadoop.hdfs.server.datanode.DataNodeMXBean","methods":{"java.util.Map getDatanodeNetworkCounts()":{"name":"getDatanodeNetworkCounts","returnType":"java.util.Map","args":[],"exceptions":[]},"java.lang.String getClusterId()":{"name":"getClusterId","returnType":"java.lang.String","args":[],"exceptions":[]},"int getXceiverCount()":{"name":"getXceiverCount","returnType":"int","args":[],"exceptions":[]},"java.lang.String getHttpPort()":{"name":"getHttpPort","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getVersion()":{"name":"getVersion","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getNamenodeAddresses()":{"name":"getNamenodeAddresses","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getVolumeInfo()":{"name":"getVolumeInf
 o","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getRpcPort()":{"name":"getRpcPort","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.hdfs.UnknownCipherSuiteException":{"name":"org.apache.hadoop.hdfs.UnknownCipherSuiteException","methods":{}}}}
\ No newline at end of file
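
(Editorial note on the report format: the API report is a JSON map whose top level carries the module name and version; "classes" maps each fully-qualified class name to its recorded public methods, and each method entry lists its name, returnType, args and exceptions. The sketch below is a minimal, hypothetical reader built on the Jackson 1.x mapper already present in these dependency lists; the class name ApiReportReader and its reflection check are illustrative, not the project's actual API verifier.)

    import java.io.File;
    import java.util.Iterator;

    import org.codehaus.jackson.JsonNode;
    import org.codehaus.jackson.map.ObjectMapper;

    /**
     * Hypothetical reader for an API report such as
     * hadoop-hdfs-2.7.3-api-report.json: prints each recorded class and
     * checks that it is present on the current classpath.
     */
    public class ApiReportReader {

      public static void main(String[] args) throws Exception {
        // args[0] is the path to an *-api-report.json resource.
        JsonNode root = new ObjectMapper().readValue(new File(args[0]), JsonNode.class);
        System.out.println("Report for " + root.get("name").getTextValue()
            + " " + root.get("version").getTextValue());

        JsonNode classes = root.get("classes");
        Iterator<String> classNames = classes.getFieldNames();
        while (classNames.hasNext()) {
          String className = classNames.next();
          JsonNode methods = classes.get(className).get("methods");
          System.out.println(className + ": " + methods.size() + " methods recorded");
          try {
            // A fuller compatibility check would also compare each method's
            // returnType, args and exceptions against java.lang.reflect data.
            Class.forName(className);
          } catch (ClassNotFoundException e) {
            System.out.println("  MISSING from classpath: " + className);
          }
        }
      }
    }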

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-bin.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-bin.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-bin.list
new file mode 100644
index 0000000..8887987
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-bin.list
@@ -0,0 +1 @@
+hdfs

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-jar.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-jar.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-jar.list
new file mode 100644
index 0000000..8355c58
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs-jar.list
@@ -0,0 +1,25 @@
+netty-3\.6\.2\.Final[\.\-_].*jar
+leveldbjni-all-1\.8[\.\-_].*jar
+jackson-core-asl-1\.9\.13[\.\-_].*jar
+jersey-server-1\.9[\.\-_].*jar
+htrace-core-3\.1\.0-incubating[\.\-_].*jar
+commons-daemon-1\.0\.13[\.\-_].*jar
+commons-cli-1\.2[\.\-_].*jar
+log4j-1\.2\.17[\.\-_].*jar
+jackson-mapper-asl-1\.9\.13[\.\-_].*jar
+guava-11\.0\.2[\.\-_].*jar
+jetty-6\.1\.26[\.\-_].*jar
+commons-logging-1\.1\.3[\.\-_].*jar
+jersey-core-1\.9[\.\-_].*jar
+asm-3\.2[\.\-_].*jar
+commons-codec-1\.4[\.\-_].*jar
+xml-apis-1\.3\.04[\.\-_].*jar
+xercesImpl-2\.9\.1[\.\-_].*jar
+xmlenc-0\.52[\.\-_].*jar
+commons-lang-2\.6[\.\-_].*jar
+netty-all-4\.0\.23\.Final[\.\-_].*jar
+jetty-util-6\.1\.26[\.\-_].*jar
+jsr305-3\.0\.0[\.\-_].*jar
+protobuf-java-2\.5\.0[\.\-_].*jar
+commons-io-2\.4[\.\-_].*jar
+servlet-api-2\.5[\.\-_].*jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs.list
new file mode 100644
index 0000000..12565fd
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-hdfs.list
@@ -0,0 +1,79 @@
+webapps
+webapps/journal
+webapps/journal/index\.html
+webapps/journal/WEB-INF
+webapps/journal/WEB-INF/web\.xml
+webapps/secondary
+webapps/secondary/index\.html
+webapps/secondary/status\.html
+webapps/secondary/WEB-INF
+webapps/secondary/WEB-INF/web\.xml
+webapps/secondary/snn\.js
+webapps/hdfs
+webapps/hdfs/dfshealth\.html
+webapps/hdfs/index\.html
+webapps/hdfs/explorer\.js
+webapps/hdfs/dfshealth\.js
+webapps/hdfs/WEB-INF
+webapps/hdfs/WEB-INF/web\.xml
+webapps/hdfs/explorer\.html
+webapps/datanode
+webapps/datanode/index\.html
+webapps/datanode/robots\.txt
+webapps/datanode/WEB-INF
+webapps/datanode/WEB-INF/web\.xml
+webapps/nfs3
+webapps/nfs3/WEB-INF
+webapps/nfs3/WEB-INF/web\.xml
+webapps/static
+webapps/static/hadoop\.css
+webapps/static/bootstrap-3\.0\.2
+webapps/static/bootstrap-3\.0\.2/fonts
+webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.svg
+webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.eot
+webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.woff
+webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.ttf
+webapps/static/bootstrap-3\.0\.2/css
+webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.css
+webapps/static/bootstrap-3\.0\.2/js
+webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.js
+webapps/static/jquery-1\.10\.([2-9]|[3-9]\d+).*\.js
+webapps/static/dust-helpers-1\.1\.([1-9]|[2-9]\d+).*\.js
+webapps/static/dust-full-2\.0\.\d+.*\.js
+webapps/static/dfs-dust\.js
+hadoop-hdfs\.jar
+bin
+bin/hdfs
+sbin
+sbin/distribute-exclude\.sh
+sbin/refresh-namenodes\.sh
+hadoop-hdfs-nfs-2\.7\.([1-9]|[2-9]\d+).*\.jar
+hadoop-hdfs-2\.7\.([1-9]|[2-9]\d+).*\.jar
+hadoop-hdfs-2\.7\.([1-9]|[2-9]\d+).*\.jar
+hadoop-hdfs-nfs\.jar
+lib
+lib/commons-daemon-1\.0\.(1[3-9]|[2-9]\d).*\.jar
+lib/commons-cli-1\.([2-9]|[3-9]\d+).*\.jar
+lib/leveldbjni-all-1\.([8-9]|[9-9]\d+).*\.jar
+lib/commons-io-2\.([4-9]|[5-9]\d+).*\.jar
+lib/jetty-util-6\.1\.(2[6-9]|[3-9]\d).*\.jar
+lib/xercesImpl-2\.9\.([1-9]|[2-9]\d+).*\.jar
+lib/netty-3\.6\.([2-9]|[3-9]\d+).*\.jar
+lib/jersey-core-1\.(9|[1-9]\d+).*\.jar
+lib/jackson-mapper-asl-1\.9\.(1[3-9]|[2-9]\d).*\.jar
+lib/asm-3\.([2-9]|[3-9]\d+).*\.jar
+lib/netty-all-4\.0\.(2[3-9]|[3-9]\d).*\.jar
+lib/xmlenc-0\.(5[2-9]|[6-9]\d).*\.jar
+lib/commons-codec-1\.([4-9]|[5-9]\d+).*\.jar
+lib/jackson-core-asl-1\.9\.(1[3-9]|[2-9]\d).*\.jar
+lib/servlet-api-2\.([5-9]|[6-9]\d+).*\.jar
+lib/jetty-6\.1\.(2[6-9]|[3-9]\d).*\.jar
+lib/jersey-server-1\.(9|[1-9]\d+).*\.jar
+lib/xml-apis-1\.3\.(0[4-9]|[1-9]\d).*\.jar
+lib/log4j-1\.2\.(1[7-9]|[2-9]\d).*\.jar
+lib/htrace-core-3\.1\.\d+.*\.jar
+lib/protobuf-java-2\.5\.\d+.*\.jar
+lib/guava-11\.0\.([2-9]|[3-9]\d+).*\.jar
+lib/jsr305-3\.0\.\d+.*\.jar
+lib/commons-logging-1\.1\.([3-9]|[4-9]\d+).*\.jar
+lib/commons-lang-2\.([6-9]|[7-9]\d+).*\.jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-bin.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-bin.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-bin.list
new file mode 100644
index 0000000..0a7a9c5
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-mapreduce-bin.list
@@ -0,0 +1 @@
+mapred


[19/50] [abbrv] bigtop git commit: Added shell scripts to make it easier to run, and resource files with expected results for ODPi 2.1.

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/4f19c159/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-api-2.7.3-api-report.json
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-api-2.7.3-api-report.json b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-api-2.7.3-api-report.json
new file mode 100644
index 0000000..6ad5f18
--- /dev/null
+++ b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-api-2.7.3-api-report.json
@@ -0,0 +1 @@
+{"name":"hadoop-yarn-api","version":"2.7.3","classes":{"org.apache.hadoop.yarn.api.records.ApplicationAccessType":{"name":"org.apache.hadoop.yarn.api.records.ApplicationAccessType","methods":{"[Lorg.apache.hadoop.yarn.api.records.ApplicationAccessType; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.ApplicationAccessType;","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationAccessType valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAccessType","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest","methods":{"void setTrackingUrl(java.lang.String)":{"name":"setTrackingUrl","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest newInstance
 (java.lang.String, int, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest","args":["java.lang.String","int","java.lang.String"],"exceptions":[]},"void setHost(java.lang.String)":{"name":"setHost","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int getRpcPort()":{"name":"getRpcPort","returnType":"int","args":[],"exceptions":[]},"void setRpcPort(int)":{"name":"setRpcPort","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String getHost()":{"name":"getHost","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getTrackingUrl()":{"name":"getTrackingUrl","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest","methods":{"org.apache.hadoop.yarn.api.records.ContainerLaunchContext getContainerLaunchContext()":{"na
 me":"getContainerLaunchContext","returnType":"org.apache.hadoop.yarn.api.records.ContainerLaunchContext","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest newInstance(org.apache.hadoop.yarn.api.records.ContainerLaunchContext, org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest","args":["org.apache.hadoop.yarn.api.records.ContainerLaunchContext","org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"void setContainerToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setContainerToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getContainerToken()":{"name":"getContainerToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"void setContainerLaunchContext(org.apache.hadoop.yarn.api.records.ContainerLaunchContext)":{"na
 me":"setContainerLaunchContext","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerLaunchContext"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest":{"name":"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest","methods":{"void setBlacklistAdditions(java.util.List)":{"name":"setBlacklistAdditions","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.List getBlacklistRemovals()":{"name":"getBlacklistRemovals","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.List getBlacklistAdditions()":{"name":"getBlacklistAdditions","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest newInstance(java.util.List, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest","args":["java.util.List","java.util.List"],"exceptions":[]},"void setBlacklistRemovals(java.util.List)":{"name":"
 setBlacklistRemovals","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest newInstance(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.YarnApplicationAttemp
 tState":{"name":"org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState","methods":{"org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState","args":["java.lang.String"],"exceptions":[]},"[Lorg.apache.hadoop.yarn.api.records.YarnApplicationAttemptState; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.YarnClusterMetrics":{"name":"org.apache.hadoop.yarn.api.records.YarnClusterMetrics","methods":{"org.apache.hadoop.yarn.api.records.YarnClusterMetrics newInstance(int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.YarnClusterMetrics","args":["int"],"exceptions":[]},"int getNumNodeManagers()":{"name":"getNumNodeManagers","returnType":"int","args":[],"exceptions":[]},"void setNumNodeManagers(int)":{"name":"
 setNumNodeManagers","returnType":"void","args":["int"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest","methods":{"java.util.List getIncreaseRequests()":{"name":"getIncreaseRequests","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest newInstance(int, float, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest","args":["int","float","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest","java.util.List"],"exceptions":[]},"void setResponseId(int)":{"name":"setResponseId","returnType":"void","args":["int"],"exceptions":[]},"void setAskList(java.util.List)":{"name":"setAskList","returnType":"void","args":["java.util.List"],"exception
 s":[]},"float getProgress()":{"name":"getProgress","returnType":"float","args":[],"exceptions":[]},"java.util.List getReleaseList()":{"name":"getReleaseList","returnType":"java.util.List","args":[],"exceptions":[]},"void setIncreaseRequests(java.util.List)":{"name":"setIncreaseRequests","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest getResourceBlacklistRequest()":{"name":"getResourceBlacklistRequest","returnType":"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest newInstance(int, float, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest","args":["int","float","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest"],"exceptions":[]},"voi
 d setProgress(float)":{"name":"setProgress","returnType":"void","args":["float"],"exceptions":[]},"void setResourceBlacklistRequest(org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest)":{"name":"setResourceBlacklistRequest","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest"],"exceptions":[]},"java.util.List getAskList()":{"name":"getAskList","returnType":"java.util.List","args":[],"exceptions":[]},"int getResponseId()":{"name":"getResponseId","returnType":"int","args":[],"exceptions":[]},"void setReleaseList(java.util.List)":{"name":"setReleaseList","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse","methods":{"void setQueueInfo(org.apache.hadoop.yarn.api.records.QueueInfo)":{"name":"setQueueInfo","returnType":"void","args":["org.apache.hadoop.yarn.api.records.QueueInfo"],"exceptions":
 []},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse newInstance(org.apache.hadoop.yarn.api.records.QueueInfo)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse","args":["org.apache.hadoop.yarn.api.records.QueueInfo"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.QueueInfo getQueueInfo()":{"name":"getQueueInfo","returnType":"org.apache.hadoop.yarn.api.records.QueueInfo","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationReport":{"name":"org.apache.hadoop.yarn.api.records.ApplicationReport","methods":{"void setApplicationResourceUsageReport(org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport)":{"name":"setApplicationResourceUsageReport","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport"],"exceptions":[]},"long getFinishTime()":{"name":"getFinishTime","returnType":"long","args":[],"exceptions":[]},"void setFinalApplicationStatus(
 org.apache.hadoop.yarn.api.records.FinalApplicationStatus)":{"name":"setFinalApplicationStatus","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus"],"exceptions":[]},"void setUser(java.lang.String)":{"name":"setUser","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.FinalApplicationStatus getFinalApplicationStatus()":{"name":"getFinalApplicationStatus","returnType":"org.apache.hadoop.yarn.api.records.FinalApplicationStatus","args":[],"exceptions":[]},"void setName(java.lang.String)":{"name":"setName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport getApplicationResourceUsageReport()":{"name":"getApplicationResourceUsageReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport","args":[],"exceptions":[]},"java.util.Set getApplicationTags()":{"name":"getApplicationTags","returnType":"jav
 a.util.Set","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationReport newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, org.apache.hadoop.yarn.api.records.ApplicationAttemptId, java.lang.String, java.lang.String, java.lang.String, java.lang.String, int, org.apache.hadoop.yarn.api.records.Token, org.apache.hadoop.yarn.api.records.YarnApplicationState, java.lang.String, java.lang.String, long, long, org.apache.hadoop.yarn.api.records.FinalApplicationStatus, org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport, java.lang.String, float, java.lang.String, org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":["org.apache.hadoop.yarn.api.records.ApplicationId","org.apache.hadoop.yarn.api.records.ApplicationAttemptId","java.lang.String","java.lang.String","java.lang.String","java.lang.String","int","org.apache.hadoop.yarn.api.records.Token","org.apache.hadoo
 p.yarn.api.records.YarnApplicationState","java.lang.String","java.lang.String","long","long","org.apache.hadoop.yarn.api.records.FinalApplicationStatus","org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport","java.lang.String","float","java.lang.String","org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"void setApplicationType(java.lang.String)":{"name":"setApplicationType","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getClientToAMToken()":{"name":"getClientToAMToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"void setYarnApplicationState(org.apache.hadoop.yarn.api.records.YarnApplicationState)":{"name":"setYarnApplicationState","returnType":"void","args":["org.apache.hadoo
 p.yarn.api.records.YarnApplicationState"],"exceptions":[]},"float getProgress()":{"name":"getProgress","returnType":"float","args":[],"exceptions":[]},"void setQueue(java.lang.String)":{"name":"setQueue","returnType":"void","args":["java.lang.String"],"exceptions":[]},"long getStartTime()":{"name":"getStartTime","returnType":"long","args":[],"exceptions":[]},"void setStartTime(long)":{"name":"setStartTime","returnType":"void","args":["long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getAMRMToken()":{"name":"getAMRMToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"java.lang.String getHost()":{"name":"getHost","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getUser()":{"name":"getUser","returnType":"java.lang.String","args":[],"exceptions":[]},"void setDiagnostics(java.lang.String)":{"name":"setDiagnostics","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setOriginalTrackingUrl
 (java.lang.String)":{"name":"setOriginalTrackingUrl","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setApplicationTags(java.util.Set)":{"name":"setApplicationTags","returnType":"void","args":["java.util.Set"],"exceptions":[]},"java.lang.String getQueue()":{"name":"getQueue","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.YarnApplicationState getYarnApplicationState()":{"name":"getYarnApplicationState","returnType":"org.apache.hadoop.yarn.api.records.YarnApplicationState","args":[],"exceptions":[]},"void setTrackingUrl(java.lang.String)":{"name":"setTrackingUrl","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setHost(java.lang.String)":{"name":"setHost","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setClientToAMToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setClientToAMToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"excepti
 ons":[]},"void setAMRMToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setAMRMToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"int getRpcPort()":{"name":"getRpcPort","returnType":"int","args":[],"exceptions":[]},"void setRpcPort(int)":{"name":"setRpcPort","returnType":"void","args":["int"],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"java.lang.String getTrackingUrl()":{"name":"getTrackingUrl","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getDiagnostics()":{"name":"getDiagnostics","returnType":"java.lang.String","args":[],"exceptions":[]},"void setProgress(float)":{"name":"setProgress","returnType":"void","args":["float"],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"ex
 ceptions":[]},"java.lang.String getOriginalTrackingUrl()":{"name":"getOriginalTrackingUrl","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getApplicationType()":{"name":"getApplicationType","returnType":"java.lang.String","args":[],"exceptions":[]},"void setFinishTime(long)":{"name":"setFinishTime","returnType":"void","args":["long"],"exceptions":[]},"void setCurrentApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId)":{"name":"setCurrentApplicationAttemptId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptId getCurrentApplicationAttemptId()":{"name":"getCurrentApplicationAttemptId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptId","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.Resource":{"name":"org.apache.hadoop.yarn.api.records.Resource","methods":{"int hashCode()":{"name":"hashCo
 de","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource newInstance(int, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":["int","int"],"exceptions":[]},"void setVirtualCores(int)":{"name":"setVirtualCores","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void setMemory(int)":{"name":"setMemory","returnType":"void","args":["int"],"exceptions":[]},"int getMemory()":{"name":"getMemory","returnType":"int","args":[],"exceptions":[]},"int getVirtualCores()":{"name":"getVirtualCores","returnType":"int","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.NodeReport":{"name":"org.apache.hadoop.yarn.api.records.NodeReport","methods":{"void setCapability(org.apache.hado
 op.yarn.api.records.Resource)":{"name":"setCapability","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"int getNumContainers()":{"name":"getNumContainers","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getUsed()":{"name":"getUsed","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeId getNodeId()":{"name":"getNodeId","returnType":"org.apache.hadoop.yarn.api.records.NodeId","args":[],"exceptions":[]},"long getLastHealthReportTime()":{"name":"getLastHealthReportTime","returnType":"long","args":[],"exceptions":[]},"void setNodeId(org.apache.hadoop.yarn.api.records.NodeId)":{"name":"setNodeId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"void setNodeLabels(java.util.Set)":{"name":"setNodeLabels","returnType":"void","args":["java.util.Set"],"exceptions":[]},"org.apache.hadoop.yarn.ap
 i.records.Resource getCapability()":{"name":"getCapability","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setHealthReport(java.lang.String)":{"name":"setHealthReport","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setRackName(java.lang.String)":{"name":"setRackName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setLastHealthReportTime(long)":{"name":"setLastHealthReportTime","returnType":"void","args":["long"],"exceptions":[]},"void setHttpAddress(java.lang.String)":{"name":"setHttpAddress","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getRackName()":{"name":"getRackName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setUsed(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setUsed","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"java.lang.String getHealthReport()":{"name":"getHealthRe
 port","returnType":"java.lang.String","args":[],"exceptions":[]},"void setNodeState(org.apache.hadoop.yarn.api.records.NodeState)":{"name":"setNodeState","returnType":"void","args":["org.apache.hadoop.yarn.api.records.NodeState"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeReport newInstance(org.apache.hadoop.yarn.api.records.NodeId, org.apache.hadoop.yarn.api.records.NodeState, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, int, java.lang.String, long)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.NodeReport","args":["org.apache.hadoop.yarn.api.records.NodeId","org.apache.hadoop.yarn.api.records.NodeState","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource","int","java.lang.String","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeState getNodeState()":{"name":"getNodeState"
 ,"returnType":"org.apache.hadoop.yarn.api.records.NodeState","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeReport newInstance(org.apache.hadoop.yarn.api.records.NodeId, org.apache.hadoop.yarn.api.records.NodeState, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, int, java.lang.String, long, java.util.Set)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.NodeReport","args":["org.apache.hadoop.yarn.api.records.NodeId","org.apache.hadoop.yarn.api.records.NodeState","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource","int","java.lang.String","long","java.util.Set"],"exceptions":[]},"java.lang.String getHttpAddress()":{"name":"getHttpAddress","returnType":"java.lang.String","args":[],"exceptions":[]},"void setNumContainers(int)":{"name":"setNumContainers","returnType":"void","args":["int"],"
 exceptions":[]},"java.util.Set getNodeLabels()":{"name":"getNodeLabels","returnType":"java.util.Set","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.Priority":{"name":"org.apache.hadoop.yarn.api.records.Priority","methods":{"void setPriority(int)":{"name":"setPriority","returnType":"void","args":["int"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.yarn.api.records.Priority)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.Priority"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Priority newInstance(int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.Priority","args":["int"],"exceptions":[]},"int getPriority()":{"name":"getPriority","returnType":"int","args":[],"exceptions":[]},"int compareTo(java.lang.Object)
 ":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationAttemptId":{"name":"org.apache.hadoop.yarn.api.records.ApplicationAttemptId","methods":{"int compareTo(org.apache.hadoop.yarn.api.records.ApplicationAttemptId)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"int getAttemptId()":{"name":"getAttemptId","returnType":"int","args":[],"except
 ions":[]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptId newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptId","args":["org.apache.hadoop.yarn.api.records.ApplicationId","int"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.NMToken":{"name":"org.apache.hadoop.yarn.api.records.NMToken","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void setToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeId getNodeId()":{"name":"getNodeId","returnType":
 "org.apache.hadoop.yarn.api.records.NodeId","args":[],"exceptions":[]},"void setNodeId(org.apache.hadoop.yarn.api.records.NodeId)":{"name":"setNodeId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getToken()":{"name":"getToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NMToken newInstance(org.apache.hadoop.yarn.api.records.NodeId, org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.NMToken","args":["org.apache.hadoop.yarn.api.records.NodeId","org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicati
 onMasterRequest","methods":{"void setFinalApplicationStatus(org.apache.hadoop.yarn.api.records.FinalApplicationStatus)":{"name":"setFinalApplicationStatus","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.FinalApplicationStatus getFinalApplicationStatus()":{"name":"getFinalApplicationStatus","returnType":"org.apache.hadoop.yarn.api.records.FinalApplicationStatus","args":[],"exceptions":[]},"void setTrackingUrl(java.lang.String)":{"name":"setTrackingUrl","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest newInstance(org.apache.hadoop.yarn.api.records.FinalApplicationStatus, java.lang.String, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus","java.lang.String",
 "java.lang.String"],"exceptions":[]},"void setDiagnostics(java.lang.String)":{"name":"setDiagnostics","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getTrackingUrl()":{"name":"getTrackingUrl","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getDiagnostics()":{"name":"getDiagnostics","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetC
 lusterMetricsRequest newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest","methods":{"java.util.List getStartContainerRequests()":{"name":"getStartContainerRequests","returnType":"java.util.List","args":[],"exceptions":[]},"void setStartContainerRequests(java.util.List)":{"name":"setStartContainerRequests","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ContainerLaunchContext":{"name":"org.apache.hadoop.yarn.api.records.ContainerLaunchContext","methods":{"ja
 va.util.Map getApplicationACLs()":{"name":"getApplicationACLs","returnType":"java.util.Map","args":[],"exceptions":[]},"java.util.Map getServiceData()":{"name":"getServiceData","returnType":"java.util.Map","args":[],"exceptions":[]},"void setApplicationACLs(java.util.Map)":{"name":"setApplicationACLs","returnType":"void","args":["java.util.Map"],"exceptions":[]},"java.util.Map getLocalResources()":{"name":"getLocalResources","returnType":"java.util.Map","args":[],"exceptions":[]},"void setServiceData(java.util.Map)":{"name":"setServiceData","returnType":"void","args":["java.util.Map"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerLaunchContext newInstance(java.util.Map, java.util.Map, java.util.List, java.util.Map, java.nio.ByteBuffer, java.util.Map)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ContainerLaunchContext","args":["java.util.Map","java.util.Map","java.util.List","java.util.Map","java.nio.ByteBuffer","java.util.Map"],"exceptions"
 :[]},"java.util.Map getEnvironment()":{"name":"getEnvironment","returnType":"java.util.Map","args":[],"exceptions":[]},"java.util.List getCommands()":{"name":"getCommands","returnType":"java.util.List","args":[],"exceptions":[]},"java.nio.ByteBuffer getTokens()":{"name":"getTokens","returnType":"java.nio.ByteBuffer","args":[],"exceptions":[]},"void setLocalResources(java.util.Map)":{"name":"setLocalResources","returnType":"void","args":["java.util.Map"],"exceptions":[]},"void setCommands(java.util.List)":{"name":"setCommands","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setTokens(java.nio.ByteBuffer)":{"name":"setTokens","returnType":"void","args":["java.nio.ByteBuffer"],"exceptions":[]},"void setEnvironment(java.util.Map)":{"name":"setEnvironment","returnType":"void","args":["java.util.Map"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportReque
 st","methods":{"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest newInstance(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse","methods":{"void setFailedRequests(java.util.Map)":{"name":"setFailedRequests","returnType":"void","args":["java.util.Map"],"excep
 tions":[]},"void setSuccessfullyStoppedContainers(java.util.List)":{"name":"setSuccessfullyStoppedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.List getSuccessfullyStoppedContainers()":{"name":"getSuccessfullyStoppedContainers","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse newInstance(java.util.List, java.util.Map)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse","args":["java.util.List","java.util.Map"],"exceptions":[]},"java.util.Map getFailedRequests()":{"name":"getFailedRequests","returnType":"java.util.Map","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.QueueState":{"name":"org.apache.hadoop.yarn.api.records.QueueState","methods":{"org.apache.hadoop.yarn.api.records.QueueState valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.QueueState","args":["
 java.lang.String"],"exceptions":[]},"[Lorg.apache.hadoop.yarn.api.records.QueueState; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.QueueState;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationId":{"name":"org.apache.hadoop.yarn.api.records.ApplicationId","methods":{"org.apache.hadoop.yarn.api.records.ApplicationId newInstance(long, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":["long","int"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"long getClusterTimestamp()":{"name":"getClusterTimestamp","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"int getId()":{"name":"getId","returnType":"int","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"
 ],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"int compareTo(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse","methods":{"org.apache.hadoop.yarn.api.records.YarnClusterMetrics getClusterMetrics()":{"name":"getClusterMetrics","returnType":"org.apache.hadoop.yarn.api.records.YarnClusterMetrics","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse newInstance(org.apache.hadoop.yarn.api.records.YarnClusterMetrics)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse","args":["org.apache.hadoop.yarn.api.records.YarnClusterMetrics"],"exc
 eptions":[]},"void setClusterMetrics(org.apache.hadoop.yarn.api.records.YarnClusterMetrics)":{"name":"setClusterMetrics","returnType":"void","args":["org.apache.hadoop.yarn.api.records.YarnClusterMetrics"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse","args":["java.util.List"],"exceptions":[]},"java.util.List getUserAclsInfoList()":{"name":"getUserAclsInfoList","returnType":"java.util.List","args":[],"exceptions":[]},"void setUserAclsInfoList(java.util.List)":{"name":"setUserAclsInfoList","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.FinalApplicationStatus":{"name":"org.apache.hadoop.
 yarn.api.records.FinalApplicationStatus","methods":{"[Lorg.apache.hadoop.yarn.api.records.FinalApplicationStatus; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.FinalApplicationStatus;","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.FinalApplicationStatus valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.FinalApplicationStatus","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse","methods":{"java.util.List getApplicationList()":{"name":"getApplicationList","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse","args":["java.util.List"],"exceptions":[]},"voi
 d setApplicationList(java.util.List)":{"name":"setApplicationList","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.YarnApplicationState":{"name":"org.apache.hadoop.yarn.api.records.YarnApplicationState","methods":{"[Lorg.apache.hadoop.yarn.api.records.YarnApplicationState; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.YarnApplicationState;","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.YarnApplicationState valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.YarnApplicationState","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.URL":{"name":"org.apache.hadoop.yarn.api.records.URL","methods":{"java.lang.String getFile()":{"name":"getFile","returnType":"java.lang.String","args":[],"exceptions":[]},"void setPort(int)":{"name":"setPort","returnType":"void","args":["int"],"exceptions":[]},"void setUserInfo(java.la
 ng.String)":{"name":"setUserInfo","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setHost(java.lang.String)":{"name":"setHost","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setScheme(java.lang.String)":{"name":"setScheme","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getHost()":{"name":"getHost","returnType":"java.lang.String","args":[],"exceptions":[]},"void setFile(java.lang.String)":{"name":"setFile","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getScheme()":{"name":"getScheme","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getUserInfo()":{"name":"getUserInfo","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.URL newInstance(java.lang.String, java.lang.String, int, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.URL","args":["java.lang.String","
 java.lang.String","int","java.lang.String"],"exceptions":[]},"int getPort()":{"name":"getPort","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse","methods":{"void setIsUnregistered(boolean)":{"name":"setIsUnregistered","returnType":"void","args":["boolean"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse newInstance(boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse","args":["boolean"],"exceptions":[]},"boolean getIsUnregistered()":{"name":"getIsUnregistered","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.ApplicationMasterProtocol":{"name":"org.apache.hadoop.yarn.api.ApplicationMasterProtocol","methods":{"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse fi
 nishApplicationMaster(org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"finishApplicationMaster","returnType":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse registerApplicationMaster(org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"registerApplicationMaster","returnType":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.Yarn
 Exception","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse allocate(org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"allocate","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse","methods":{"void setApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationReport)":{"name":"setApplicationReport","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationReport"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationReport getApplicationReport()":{"name":"getApplicationReport","returnType
 ":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse newInstance(org.apache.hadoop.yarn.api.records.ApplicationReport)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse","args":["org.apache.hadoop.yarn.api.records.ApplicationReport"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest","methods":{"void setContainerIds(java.util.List)":{"name":"setContainerIds","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest","args":["java.util.List"],"exceptions":[]},"java.util.List getContainerIds()":{"name":"getContainerIds"
 ,"returnType":"java.util.List","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","methods":{"void setStartRange(long, long) throws java.lang.IllegalArgumentException":{"name":"setStartRange","returnType":"void","args":["long","long"],"exceptions":["java.lang.IllegalArgumentException"]},"java.util.Set getApplicationTags()":{"name":"getApplicationTags","returnType":"java.util.Set","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope getScope()":{"name":"getScope","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope","args":[],"exceptions":[]},"void setScope(org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope)":{"name":"setScope","returnType":"void","args":["org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope"],"exceptions":[]},"org.apache.hadoop.yarn.api.protoco
 lrecords.GetApplicationsRequest newInstance(org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance(org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope, java.util.Set, java.util.Set, java.util.Set, java.util.Set, java.util.EnumSet, org.apache.commons.lang.math.LongRange, org.apache.commons.lang.math.LongRange, java.lang.Long)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope","java.util.Set","java.util.Set","java.util.Set","java.util.Set","java.util.EnumSet","org.apache.commons.lang.math.LongRange","org.apache.commons.lang.math.LongRange","java.lang.Lo
 ng"],"exceptions":[]},"long getLimit()":{"name":"getLimit","returnType":"long","args":[],"exceptions":[]},"java.util.EnumSet getApplicationStates()":{"name":"getApplicationStates","returnType":"java.util.EnumSet","args":[],"exceptions":[]},"void setFinishRange(org.apache.commons.lang.math.LongRange)":{"name":"setFinishRange","returnType":"void","args":["org.apache.commons.lang.math.LongRange"],"exceptions":[]},"void setUsers(java.util.Set)":{"name":"setUsers","returnType":"void","args":["java.util.Set"],"exceptions":[]},"org.apache.commons.lang.math.LongRange getFinishRange()":{"name":"getFinishRange","returnType":"org.apache.commons.lang.math.LongRange","args":[],"exceptions":[]},"void setApplicationTags(java.util.Set)":{"name":"setApplicationTags","returnType":"void","args":["java.util.Set"],"exceptions":[]},"void setApplicationStates(java.util.EnumSet)":{"name":"setApplicationStates","returnType":"void","args":["java.util.EnumSet"],"exceptions":[]},"org.apache.hadoop.yarn.api.pro
 tocolrecords.GetApplicationsRequest newInstance(java.util.EnumSet)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["java.util.EnumSet"],"exceptions":[]},"java.util.Set getQueues()":{"name":"getQueues","returnType":"java.util.Set","args":[],"exceptions":[]},"java.util.Set getUsers()":{"name":"getUsers","returnType":"java.util.Set","args":[],"exceptions":[]},"void setLimit(long)":{"name":"setLimit","returnType":"void","args":["long"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance(java.util.Set, java.util.EnumSet)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["java.util.Set","java.util.EnumSet"],"exceptions":[]},"void setApplicationStates(java.util.Set)":{"name":"setApplicationStates","returnType":"void","args":["java.util.Set"],"exceptions":[]},"org.apache.commons.lang.math.LongRange getStartRange()":{"name":
 "getStartRange","returnType":"org.apache.commons.lang.math.LongRange","args":[],"exceptions":[]},"void setApplicationTypes(java.util.Set)":{"name":"setApplicationTypes","returnType":"void","args":["java.util.Set"],"exceptions":[]},"void setQueues(java.util.Set)":{"name":"setQueues","returnType":"void","args":["java.util.Set"],"exceptions":[]},"void setFinishRange(long, long)":{"name":"setFinishRange","returnType":"void","args":["long","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance(java.util.Set)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["java.util.Set"],"exceptions":[]},"void setStartRange(org.apache.commons.lang.math.LongRange)":{"name":"setStartRange","returnType":"void","args":["org.apache.commons.lang.math.LongRange"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance()":{"name":"newInstance","returnType":"org
 .apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":[],"exceptions":[]},"java.util.Set getApplicationTypes()":{"name":"getApplicationTypes","returnType":"java.util.Set","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.LocalResourceType":{"name":"org.apache.hadoop.yarn.api.records.LocalResourceType","methods":{"[Lorg.apache.hadoop.yarn.api.records.LocalResourceType; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.LocalResourceType;","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.LocalResourceType valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.LocalResourceType","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport":{"name":"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport","methods":{"long getVcoreSeconds()":{"name":"getVcoreSeconds","returnType":"long","args":[],"exceptions":[]},"i
 nt getNumUsedContainers()":{"name":"getNumUsedContainers","returnType":"int","args":[],"exceptions":[]},"long getMemorySeconds()":{"name":"getMemorySeconds","returnType":"long","args":[],"exceptions":[]},"void setMemorySeconds(long)":{"name":"setMemorySeconds","returnType":"void","args":["long"],"exceptions":[]},"void setUsedResources(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setUsedResources","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void setNeededResources(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setNeededResources","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getReservedResources()":{"name":"getReservedResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getNeededResources()":{"name":"getNeededResources","returnType":"org.apa
 che.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setNumUsedContainers(int)":{"name":"setNumUsedContainers","returnType":"void","args":["int"],"exceptions":[]},"void setNumReservedContainers(int)":{"name":"setNumReservedContainers","returnType":"void","args":["int"],"exceptions":[]},"void setVcoreSeconds(long)":{"name":"setVcoreSeconds","returnType":"void","args":["long"],"exceptions":[]},"int getNumReservedContainers()":{"name":"getNumReservedContainers","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport newInstance(int, int, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, long, long)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport","args":["int","int","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop
 .yarn.api.records.Resource","long","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getUsedResources()":{"name":"getUsedResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setReservedResources(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setReservedResources","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest","methods":{"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest newInstance(java.lang.String, boolean, boolean, boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest","args":["java.lang.String","boolean","boolean","boolean"],"excep
 tions":[]},"boolean getRecursive()":{"name":"getRecursive","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getQueueName()":{"name":"getQueueName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setIncludeChildQueues(boolean)":{"name":"setIncludeChildQueues","returnType":"void","args":["boolean"],"exceptions":[]},"boolean getIncludeApplications()":{"name":"getIncludeApplications","returnType":"boolean","args":[],"exceptions":[]},"boolean getIncludeChildQueues()":{"name":"getIncludeChildQueues","returnType":"boolean","args":[],"exceptions":[]},"void setRecursive(boolean)":{"name":"setRecursive","returnType":"void","args":["boolean"],"exceptions":[]},"void setIncludeApplications(boolean)":{"name":"setIncludeApplications","returnType":"void","args":["boolean"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","methods":{"void setIncreasedContainers(ja
 va.util.List)":{"name":"setIncreasedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setDecreasedContainers(java.util.List)":{"name":"setDecreasedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse newInstance(int, java.util.List, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.AMCommand, int, org.apache.hadoop.yarn.api.records.PreemptionMessage, java.util.List, java.util.List, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["int","java.util.List","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.AMCommand","int","org.apache.hadoop.yarn.api.records.PreemptionMessage","java.util.List","java.util.List","java.util.List"],"exceptions":[]},"void setUpdatedNodes(java.util.Li
 st)":{"name":"setUpdatedNodes","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setResponseId(int)":{"name":"setResponseId","returnType":"void","args":["int"],"exceptions":[]},"java.util.List getNMTokens()":{"name":"getNMTokens","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.List getUpdatedNodes()":{"name":"getUpdatedNodes","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.List getIncreasedContainers()":{"name":"getIncreasedContainers","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.AMCommand getAMCommand()":{"name":"getAMCommand","returnType":"org.apache.hadoop.yarn.api.records.AMCommand","args":[],"exceptions":[]},"void setNMTokens(java.util.List)":{"name":"setNMTokens","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getAMRMToken()":{"name":"getAMRMToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"e
 xceptions":[]},"void setAMCommand(org.apache.hadoop.yarn.api.records.AMCommand)":{"name":"setAMCommand","returnType":"void","args":["org.apache.hadoop.yarn.api.records.AMCommand"],"exceptions":[]},"void setAllocatedContainers(java.util.List)":{"name":"setAllocatedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"int getNumClusterNodes()":{"name":"getNumClusterNodes","returnType":"int","args":[],"exceptions":[]},"void setNumClusterNodes(int)":{"name":"setNumClusterNodes","returnType":"void","args":["int"],"exceptions":[]},"void setCompletedContainersStatuses(java.util.List)":{"name":"setCompletedContainersStatuses","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setAMRMToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setAMRMToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.PreemptionMessage getPreemptionMessage()":{"name":"getPreemptionMessage","re
 turnType":"org.apache.hadoop.yarn.api.records.PreemptionMessage","args":[],"exceptions":[]},"java.util.List getCompletedContainersStatuses()":{"name":"getCompletedContainersStatuses","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getAvailableResources()":{"name":"getAvailableResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setAvailableResources(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setAvailableResources","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse newInstance(int, java.util.List, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.AMCommand, int, org.apache.hadoop.yarn.api.records.PreemptionMessage, java.util.List, org.apache.hadoop.yarn.api.records.Token, java.util.List, java.util.List)":{"na
 me":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["int","java.util.List","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.AMCommand","int","org.apache.hadoop.yarn.api.records.PreemptionMessage","java.util.List","org.apache.hadoop.yarn.api.records.Token","java.util.List","java.util.List"],"exceptions":[]},"void setPreemptionMessage(org.apache.hadoop.yarn.api.records.PreemptionMessage)":{"name":"setPreemptionMessage","returnType":"void","args":["org.apache.hadoop.yarn.api.records.PreemptionMessage"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse newInstance(int, java.util.List, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.AMCommand, int, org.apache.hadoop.yarn.api.records.PreemptionMessage, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.pro
 tocolrecords.AllocateResponse","args":["int","java.util.List","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.AMCommand","int","org.apache.hadoop.yarn.api.records.PreemptionMessage","java.util.List"],"exceptions":[]},"java.util.List getAllocatedContainers()":{"name":"getAllocatedContainers","returnType":"java.util.List","args":[],"exceptions":[]},"int getResponseId()":{"name":"getResponseId","returnType":"int","args":[],"exceptions":[]},"java.util.List getDecreasedContainers()":{"name":"getDecreasedContainers","returnType":"java.util.List","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.LocalResourceVisibility":{"name":"org.apache.hadoop.yarn.api.records.LocalResourceVisibility","methods":{"org.apache.hadoop.yarn.api.records.LocalResourceVisibility valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.LocalResourceVisibility","args":["java.lang.String"],"excep
 tions":[]},"[Lorg.apache.hadoop.yarn.api.records.LocalResourceVisibility; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.LocalResourceVisibility;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.QueueUserACLInfo":{"name":"org.apache.hadoop.yarn.api.records.QueueUserACLInfo","methods":{"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setUserAcls(java.util.List)":{"name":"setUserAcls","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.lang.String getQueueName()":{"name":"getQueueName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.List getUserAcls()":{"name":"getUserAcls","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.QueueUserACLInfo newInstance(java.lang.String, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.QueueUserACLInfo","a
 rgs":["java.lang.String","java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest","methods":{"void setRenewer(java.lang.String)":{"name":"setRenewer","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest newInstance(java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest","args":["java.lang.String"],"exceptions":[]},"java.lang.String getRenewer()":{"name":"getRenewer","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ContainerState":{"name":"org.apache.hadoop.yarn.api.records.ContainerState","methods":{"org.apache.hadoop.yarn.api.records.ContainerState valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.ContainerState","a
 rgs":["java.lang.String"],"exceptions":[]},"[Lorg.apache.hadoop.yarn.api.records.ContainerState; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.ContainerState;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.ContainerManagementProtocol":{"name":"org.apache.hadoop.yarn.api.ContainerManagementProtocol","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse getContainerStatuses(org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainerSt
 atuses","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse startContainers(org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"startContainers","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse stopContainers(org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name
 ":"stopContainers","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.exceptions.YarnException":{"name":"org.apache.hadoop.yarn.exceptions.YarnException","methods":{}},"org.apache.hadoop.yarn.api.records.QueueInfo":{"name":"org.apache.hadoop.yarn.api.records.QueueInfo","methods":{"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setCurrentCapacity(float)":{"name":"setCurrentCapacity","returnType":"void","args":["float"],"exceptions":[]},"void setCapacity(float)":{"name":"setCapacity","returnType":"void","args":["float"],"exceptions":[]},"java.lang.String getQueueName()":{"name":"getQueueName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.List getChildQueues()":{"
 name":"getChildQueues","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.QueueInfo newInstance(java.lang.String, float, float, float, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.QueueState, java.util.Set, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.QueueInfo","args":["java.lang.String","float","float","float","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.QueueState","java.util.Set","java.lang.String"],"exceptions":[]},"void setDefaultNodeLabelExpression(java.lang.String)":{"name":"setDefaultNodeLabelExpression","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.util.List getApplications()":{"name":"getApplications","returnType":"java.util.List","args":[],"exceptions":[]},"float getCapacity()":{"name":"getCapacity","returnType":"float","args":[],"exceptions":[]},"float getCurrentCapacity()":{"name":"getCurrentCapacity","returnType
 ":"float","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.QueueState getQueueState()":{"name":"getQueueState","returnType":"org.apache.hadoop.yarn.api.records.QueueState","args":[],"exceptions":[]},"void setChildQueues(java.util.List)":{"name":"setChildQueues","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setApplications(java.util.List)":{"name":"setApplications","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.lang.String getDefaultNodeLabelExpression()":{"name":"getDefaultNodeLabelExpression","returnType":"java.lang.String","args":[],"exceptions":[]},"void setMaximumCapacity(float)":{"name":"setMaximumCapacity","returnType":"void","args":["float"],"exceptions":[]},"void setQueueState(org.apache.hadoop.yarn.api.records.QueueState)":{"name":"setQueueState","returnType":"void","args":["org.apache.hadoop.yarn.api.records.QueueState"],"exceptions":[]},"void setAccessibleNodeLabels(java.util.Set)":{"name":"setAccessibleNodeLabels
 ","returnType":"void","args":["java.util.Set"],"exceptions":[]},"float getMaximumCapacity()":{"name":"getMaximumCapacity","returnType":"float","args":[],"exceptions":[]},"java.util.Set getAccessibleNodeLabels()":{"name":"getAccessibleNodeLabels","returnType":"java.util.Set","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse","methods":{"void setRMDelegationToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setRMDelegationToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse newInstance(org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.To
 ken getRMDelegationToken()":{"name":"getRMDelegationToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.NodeId":{"name":"org.apache.hadoop.yarn.api.records.NodeId","methods":{"int compareTo(org.apache.hadoop.yarn.api.records.NodeId)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeId newInstance(java.lang.String, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.NodeId","args":["java.lang.String","int"],"exceptions":[]},"java.lang.String getHost()":{"name":"getHost","returnType":"java.lang.String","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int",
 "args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"int getPort()":{"name":"getPort","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest newInstance(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest","args":["org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext"],"exceptions":[]},"void setApplicationSubmissionContext(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext)":{"name":"setApplicationSubmissionContext","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext"],"excep
 tions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext getApplicationSubmissionContext()":{"name":"getApplicationSubmissionContext","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse newInstance(boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse","args":["boolean"],"exceptions":[]},"void setIsKillCompleted(boolean)":{"name":"setIsKillCompleted","returnType":"void","args":["boolean"],"exceptions":[]},"boolean getIsKillCompleted()":{"name":"getIsKillCompleted","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.ApplicationClientProtocol":{"name":"org.apache.hadoop.yarn.api.ApplicationClientProtocol"
 ,"methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse getNewApplication(org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getNewApplication","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse getClusterNodes(org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterNodes","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnExcept
 ion","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse updateReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"updateReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesResponse getLabelsToNodes(org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getLabelsToNodes","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesRequest"],"exceptions":["org.apache.hadoop.yar
 n.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesResponse moveApplicationAcrossQueues(org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"moveApplicationAcrossQueues","returnType":"org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse deleteReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"deleteReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse","args":["org.apache.hadoop
 .yarn.api.protocolrecords.ReservationDeleteRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse getQueueUserAcls(org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueUserAcls","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse submitApplication(org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"submitApplication","returnType":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicat
 ionResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse forceKillApplication(org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"forceKillApplication","returnType":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsResponse getNodeToLabels(org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getNodeToLabels","returnType":"org.apache.hadoop.yarn.api.protocolr
 ecords.GetNodesToLabelsResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse submitReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"submitReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse getQueueInfo(org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueInfo","returnType":"org.apache.ha
 doop.yarn.api.protocolrecords.GetQueueInfoResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse getClusterMetrics(org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterMetrics","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsResponse getClusterNodeLabels(org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterNodeLabe
 ls","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest","methods":{"void setContainerIds(java.util.List)":{"name":"setContainerIds","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest","args":["java.util.List"],"exceptions":[]},"java.util.List getContainerIds()":{"name":"getContainerIds","returnType":"java.util.List","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse
 ":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","args":["org.apache.hadoop.yarn.api.records.ApplicationId","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"org.apache.hadoop.ya
 rn.api.records.Resource getMaximumResourceCapability()":{"name":"getMaximumResourceCapability","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setMaximumResourceCapability(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setMaximumResourceCapability","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ContainerId":{"name":"org.apache.hadoop.yarn.api.records.ContainerId","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId newInstance(org.apache.hadoop.yarn.api.records.ApplicationAttemptId, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ContainerId","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId","int"],"exceptions":[]
 },"int getId()":{"name":"getId","returnType":"int","args":[],"exceptions":[]},"long getContainerId()":{"name":"getContainerId","returnType":"long","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId newContainerId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId, long)":{"name":"newContainerId","returnType":"org.apache.hadoop.yarn.api.records.ContainerId","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptId getApplicationAttemptId()":{"name":"getApplicationAttemptId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptId","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId fromString(java.lang.String)":{"name":"fromString","returnType":"org.apache.
 hadoop.yarn.api.records.ContainerId","args":["java.lang.String"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.Container":{"name":"org.apache.hadoop.yarn.api.records.Container","methods":{"org.apache.hadoop.yarn.api.records.Priority getPriority()":{"name":"getPriority","returnType":"org.apache.hadoop.yarn.api.records.Priority","args":[],"exceptions":[]},"java.lang.String getNodeHttpAddress()":{"name":"getNodeHttpAddress","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeId getNodeId()":{"name":"getNodeId","returnType":"org.apache.hadoop.yarn.api.records.NodeId","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId getId()":{"name":"getId","returnType":"org.apache.hadoop.y
 arn.api.records.ContainerId","args":[],"exceptions":[]},"void setContainerToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setContainerToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"void setNodeId(org.apache.hadoop.yarn.api.records.NodeId)":{"name":"setNodeId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getContainerToken()":{"name":"getContainerToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getResource()":{"name":"getResource","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Container newInstance(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.record
 s.Priority, org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.Container","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Priority","org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"void setPriority(org.apache.hadoop.yarn.api.records.Priority)":{"name":"setPriority","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Priority"],"exceptions":[]},"void setResource(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setResource","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void setId(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"setId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"void setNodeHttpAddress(java.lang.String)":{"name":"setNod
 eHttpAddress","returnType":"void","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ResourceRequest":{"name":"org.apache.hadoop.yarn.api.records.ResourceRequest","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceRequest newInstance(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, int, boolean, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ResourceRequest","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","int","boolean","java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Priority getPriority()":{"name":"getPriority","returnType":"org.apache.hadoop.yarn.api.records.Priority","args":[],"exceptions":[]},"void setCapability(org.apache.hadoop.yarn.api.records.Resource)":{"name":"s
 etCapability","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"int getNumContainers()":{"name":"getNumContainers","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceRequest newInstance(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ResourceRequest","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","int"],"exceptions":[]},"void setRelaxLocality(boolean)":{"name":"setRelaxLocality","returnType":"void","args":["boolean"],"exceptions":[]},"void setResourceName(java.lang.String)":{"name":"setResourceName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getCapability()":{"name":"getCapability","returnType":"org.apache.hadoop.yarn.api.records.R
 esource","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceRequest newInstance(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, int, boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ResourceRequest","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","int","boolean"],"exceptions":[]},"void setNodeLabelExpression(java.lang.String)":{"name":"setNodeLabelExpression","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getNodeLabelExpression()":{"name":"getNodeLabelExpression","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean getR
 elaxLocality()":{"name":"getRelaxLocality","returnType":"boolean","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.yarn.api.records.ResourceRequest)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ResourceRequest"],"exceptions":[]},"java.lang.String getResourceName()":{"name":"getResourceName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setPriority(org.apache.hadoop.yarn.api.records.Priority)":{"name":"setPriority","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Priority"],"exceptions":[]},"void setNumContainers(int)":{"name":"setNumContainers","returnType":"void","args":["int"],"exceptions":[]},"boolean isAnyLocation(java.lang.String)":{"name":"isAnyLocation","returnType":"boolean","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ContainerStatus":{"name":"org.apache.hadoop.yarn.api.records.ContainerStatus","methods":{"org.apache.hadoop.yarn.api.records.ContainerStat
 e getState()":{"name":"getState","returnType":"org.apache.hadoop.yarn.api.records.ContainerState","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId getContainerId()":{"name":"getContainerId","returnType":"org.apache.hadoop.yarn.api.records.ContainerId","args":[],"exceptions":[]},"int getExitStatus()":{"name":"getExitStatus","returnType":"int","args":[],"exceptions":[]},"void setExitStatus(int)":{"name":"setExitStatus","returnType":"void","args":["int"],"exceptions":[]},"void setState(org.apache.hadoop.yarn.api.records.ContainerState)":{"name":"setState","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerState"],"exceptions":[]},"void setDiagnostics(java.lang.String)":{"name":"setDiagnostics","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerStatus newInstance(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.ContainerState, java.lang.String, int
 )":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ContainerStatus","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.ap

<TRUNCATED>

[13/50] [abbrv] bigtop git commit: Sort of working prototype. Lots of hard-coded pieces at the moment.

Posted by rv...@apache.org.
Sort of working prototype. Lots of hard-coded pieces at the moment.

(cherry picked from commit 83cfc6ab0891e2c7b73b8c72ea4ea20d98b774bf)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/f9c6c65c
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/f9c6c65c
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/f9c6c65c

Branch: refs/heads/master
Commit: f9c6c65c35838af182af35dce462419f19417927
Parents: c313795
Author: Alan Gates <ga...@hortonworks.com>
Authored: Mon Nov 7 15:52:37 2016 -0800
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:12 2017 -0700

----------------------------------------------------------------------
 .../src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java | 7 +++++++
 1 file changed, 7 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/f9c6c65c/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
index 4a733d6..6fcfe37 100644
--- a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
+++ b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 import org.apache.hive.hcatalog.data.DefaultHCatRecord;
 import org.apache.hive.hcatalog.data.HCatRecord;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
@@ -74,6 +75,7 @@ public class HCatalogMR extends Configured implements Tool {
     job.addCacheArchive(new URI("hdfs:/user/gates/hive-hcatalog-core-1.2.1.jar"));
     job.addCacheArchive(new URI("hdfs:/user/gates/hive-metastore-1.2.1.jar"));
     job.addCacheArchive(new URI("hdfs:/user/gates/hive-exec-1.2.1.jar"));
+    job.addCacheArchive(new URI("hdfs:/user/gates/libfb303-0.9.2.jar"));
 
     return job.waitForCompletion(true) ? 0 : 1;
 
@@ -121,4 +123,9 @@ public class HCatalogMR extends Configured implements Tool {
       context.write(null, output);
     }
   }
+
+  public static void main(String[] args) throws Exception {
+    int exitCode = ToolRunner.run(new HCatalogMR(), args);
+    System.exit(exitCode);
+  }
  }
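
The cache archives in this patch are pinned to hdfs:/user/gates/... locations, which is one of the hard-coded pieces the commit message itself mentions. As a minimal sketch only (not part of this patch), the jar list could instead be driven by a configuration property; the property name and helper class below are assumptions for illustration.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

// Illustration only: pull the cache-archive jars from a configuration property
// instead of hard-coding hdfs:/user/gates/... paths. The property name is assumed.
public class CacheArchiveHelper {
  static final String JARS_PROPERTY = "odpi.test.hcat.cache.jars";

  static void addCacheArchives(Job job, Configuration conf) throws Exception {
    for (String jar : conf.getTrimmedStrings(JARS_PROPERTY)) {
      job.addCacheArchive(new URI(jar));
    }
  }
}

With the new main method added here, the job can also be launched directly, for example as hadoop jar <test jar> org.odpi.specs.runtime.hive.HCatalogMR <args>; ToolRunner parses the standard generic options (-D, -conf, -libjars) before passing the remaining arguments to run().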


[21/50] [abbrv] bigtop git commit: Added shell scripts to make it easier to run, and resource files with expected results for ODPi 2.1.

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/4f19c159/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-2.7.3-api-report.json
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-2.7.3-api-report.json b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-2.7.3-api-report.json
new file mode 100644
index 0000000..b5e2265
--- /dev/null
+++ b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-2.7.3-api-report.json
@@ -0,0 +1 @@
+{"name":"hadoop-hdfs","version":"2.7.3","classes":{"org.apache.hadoop.hdfs.server.namenode.NameNodeMXBean":{"name":"org.apache.hadoop.hdfs.server.namenode.NameNodeMXBean","methods":{"long getTotal()":{"name":"getTotal","returnType":"long","args":[],"exceptions":[]},"java.lang.String getDeadNodes()":{"name":"getDeadNodes","returnType":"java.lang.String","args":[],"exceptions":[]},"int getDistinctVersionCount()":{"name":"getDistinctVersionCount","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.hdfs.protocol.RollingUpgradeInfo$Bean getRollingUpgradeStatus()":{"name":"getRollingUpgradeStatus","returnType":"org.apache.hadoop.hdfs.protocol.RollingUpgradeInfo$Bean","args":[],"exceptions":[]},"java.lang.String getVersion()":{"name":"getVersion","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.Map getDistinctVersions()":{"name":"getDistinctVersions","returnType":"java.util.Map","args":[],"exceptions":[]},"int getThreads()":{"name":"getThreads","returnType
 ":"int","args":[],"exceptions":[]},"java.lang.String getJournalTransactionInfo()":{"name":"getJournalTransactionInfo","returnType":"java.lang.String","args":[],"exceptions":[]},"float getPercentBlockPoolUsed()":{"name":"getPercentBlockPoolUsed","returnType":"float","args":[],"exceptions":[]},"java.lang.String getClusterId()":{"name":"getClusterId","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getLiveNodes()":{"name":"getLiveNodes","returnType":"java.lang.String","args":[],"exceptions":[]},"long getBlockPoolUsedSpace()":{"name":"getBlockPoolUsedSpace","returnType":"long","args":[],"exceptions":[]},"java.lang.String getSafemode()":{"name":"getSafemode","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getCorruptFiles()":{"name":"getCorruptFiles","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getSoftwareVersion()":{"name":"getSoftwareVersion","returnType":"java.lang.String","args":[],"exceptions":[]
 },"long getTotalFiles()":{"name":"getTotalFiles","returnType":"long","args":[],"exceptions":[]},"long getCacheUsed()":{"name":"getCacheUsed","returnType":"long","args":[],"exceptions":[]},"java.lang.String getNameDirStatuses()":{"name":"getNameDirStatuses","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getCompileInfo()":{"name":"getCompileInfo","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getNodeUsage()":{"name":"getNodeUsage","returnType":"java.lang.String","args":[],"exceptions":[]},"long getNumberOfMissingBlocksWithReplicationFactorOne()":{"name":"getNumberOfMissingBlocksWithReplicationFactorOne","returnType":"long","args":[],"exceptions":[]},"java.lang.String getNameJournalStatus()":{"name":"getNameJournalStatus","returnType":"java.lang.String","args":[],"exceptions":[]},"long getNonDfsUsedSpace()":{"name":"getNonDfsUsedSpace","returnType":"long","args":[],"exceptions":[]},"java.lang.String getNNStarted()":{"name":"ge
 tNNStarted","returnType":"java.lang.String","args":[],"exceptions":[]},"float getPercentRemaining()":{"name":"getPercentRemaining","returnType":"float","args":[],"exceptions":[]},"boolean isUpgradeFinalized()":{"name":"isUpgradeFinalized","returnType":"boolean","args":[],"exceptions":[]},"long getTotalBlocks()":{"name":"getTotalBlocks","returnType":"long","args":[],"exceptions":[]},"java.lang.String getBlockPoolId()":{"name":"getBlockPoolId","returnType":"java.lang.String","args":[],"exceptions":[]},"long getUsed()":{"name":"getUsed","returnType":"long","args":[],"exceptions":[]},"long getNumberOfMissingBlocks()":{"name":"getNumberOfMissingBlocks","returnType":"long","args":[],"exceptions":[]},"java.lang.String getDecomNodes()":{"name":"getDecomNodes","returnType":"java.lang.String","args":[],"exceptions":[]},"long getFree()":{"name":"getFree","returnType":"long","args":[],"exceptions":[]},"float getPercentUsed()":{"name":"getPercentUsed","returnType":"float","args":[],"exceptions":
 []},"long getCacheCapacity()":{"name":"getCacheCapacity","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.hdfs.server.datanode.DataNodeMXBean":{"name":"org.apache.hadoop.hdfs.server.datanode.DataNodeMXBean","methods":{"java.util.Map getDatanodeNetworkCounts()":{"name":"getDatanodeNetworkCounts","returnType":"java.util.Map","args":[],"exceptions":[]},"java.lang.String getClusterId()":{"name":"getClusterId","returnType":"java.lang.String","args":[],"exceptions":[]},"int getXceiverCount()":{"name":"getXceiverCount","returnType":"int","args":[],"exceptions":[]},"java.lang.String getHttpPort()":{"name":"getHttpPort","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getVersion()":{"name":"getVersion","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getNamenodeAddresses()":{"name":"getNamenodeAddresses","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getVolumeInfo()":{"name":"getVolumeInf
 o","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getRpcPort()":{"name":"getRpcPort","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.hdfs.UnknownCipherSuiteException":{"name":"org.apache.hadoop.hdfs.UnknownCipherSuiteException","methods":{}}}}
\ No newline at end of file
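
Each of these expected-results resource files maps public class names to their public methods, with return types, argument types, and declared exceptions. As a rough sketch of how such a report could be checked against a live classpath (this is not the harness in this patch series, and it assumes Jackson databind is available on the test classpath), one could walk the "classes" map and resolve each name by reflection:

import java.io.File;
import java.util.Iterator;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

// Illustration only: confirm every class listed in an api-report JSON file is
// loadable, e.g. hadoop-hdfs-2.7.3-api-report.json from src/test/resources.
public class ApiReportChecker {
  public static void main(String[] args) throws Exception {
    JsonNode report = new ObjectMapper().readTree(new File(args[0]));
    Iterator<String> classNames = report.get("classes").fieldNames();
    while (classNames.hasNext()) {
      String className = classNames.next();
      Class<?> clazz = Class.forName(className);  // throws ClassNotFoundException if absent
      System.out.println(clazz.getName() + ": " + clazz.getMethods().length + " public methods");
    }
  }
}

A fuller check would also compare method signatures against each "methods" entry; the class-level walk above is only meant to show the shape of the data these resource files carry.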


[34/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-client-core-2.7.3-api-report.json
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-client-core-2.7.3-api-report.json b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-client-core-2.7.3-api-report.json
deleted file mode 100644
index 6061c5e..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-client-core-2.7.3-api-report.json
+++ /dev/null
@@ -1 +0,0 @@
-{"name":"hadoop-mapreduce-client-core","version":"2.7.3","classes":{"org.apache.hadoop.mapred.FixedLengthInputFormat":{"name":"org.apache.hadoop.mapred.FixedLengthInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void setRecordLength(org.apache.hadoop.conf.Configuration, int)":{"name":"setRecordLength","returnType":"void","args":["org.apache.hadoop.conf.Configuration","int"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"int getRecordLength(org.apache.hadoop.conf.Configu
 ration)":{"name":"getRecordLength","returnType":"int","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.CombineFileSplit":{"name":"org.apache.hadoop.mapred.lib.CombineFileSplit","methods":{"org.apache.hadoop.mapred.JobConf getJob()":{"name":"getJob","returnType":"org.apache.hadoop.mapred.JobConf","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorMapper":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) t
 hrows java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void map(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.io.Writable, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void reduce(org.apache.hadoop.io.Text, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["org.apache.hadoop.io.Text","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"except
 ions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.HashPartitioner":{"name":"org.apache.hadoop.mapred.lib.HashPartitioner","methods":{"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.OutputFormat":{"name":"org.apache.hadoop.mapreduce.OutputFormat","methods":{"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void checkOutputSpecs(org.apac
 he.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.CounterGroup":{"name":"org.apache.hadoop.mapreduce.CounterGroup","methods":{}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob","methods":{"org.apache.hadoop.mapred.jobcontrol.JobControl createValueAggregatorJobs([Ljava.lang.String;, [Ljava.lang.Clas
 s;) throws java.io.IOException":{"name":"createValueAggregatorJobs","returnType":"org.apache.hadoop.mapred.jobcontrol.JobControl","args":["[Ljava.lang.String;","[Ljava.lang.Class;"],"exceptions":["java.io.IOException"]},"void setAggregatorDescriptors(org.apache.hadoop.mapred.JobConf, [Ljava.lang.Class;)":{"name":"setAggregatorDescriptors","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","[Ljava.lang.Class;"],"exceptions":[]},"org.apache.hadoop.mapred.JobConf createValueAggregatorJob([Ljava.lang.String;, [Ljava.lang.Class;) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapred.JobConf","args":["[Ljava.lang.String;","[Ljava.lang.Class;"],"exceptions":["java.io.IOException"]},"void main([Ljava.lang.String;) throws java.io.IOException":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobConf createValueAggregatorJob([Ljava.lang.String;, [Ljava.la
 ng.Class;, java.lang.Class) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapred.JobConf","args":["[Ljava.lang.String;","[Ljava.lang.Class;","java.lang.Class"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.jobcontrol.JobControl createValueAggregatorJobs([Ljava.lang.String;) throws java.io.IOException":{"name":"createValueAggregatorJobs","returnType":"org.apache.hadoop.mapred.jobcontrol.JobControl","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobConf createValueAggregatorJob([Ljava.lang.String;) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapred.JobConf","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobConf createValueAggregatorJob([Ljava.lang.String;, java.lang.Class) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapred.J
 obConf","args":["[Ljava.lang.String;","java.lang.Class"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.InvalidInputException":{"name":"org.apache.hadoop.mapreduce.lib.input.InvalidInputException","methods":{"java.util.List getProblems()":{"name":"getProblems","returnType":"java.util.List","args":[],"exceptions":[]},"java.lang.String getMessage()":{"name":"getMessage","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.aggregate.UserDefinedValueAggregatorDescriptor":{"name":"org.apache.hadoop.mapred.lib.aggregate.UserDefinedValueAggregatorDescriptor","methods":{"java.lang.Object createInstance(java.lang.String)":{"name":"createInstance","returnType":"java.lang.Object","args":["java.lang.String"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.Counters$Counter":{"name
 ":"org.apache.hadoop.mapred.Counters$Counter","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void setDisplayName(java.lang.String)":{"name":"setDisplayName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"boolean contentEquals(org.apache.hadoop.mapred.Counters$Counter)":{"name":"contentEquals","returnType":"boolean","args":["org.apache.hadoop.mapred.Counters$Counter"],"exceptions":[]},"java.lang.String makeEscapedCompactString()":{"name":"makeEscapedCompactString","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"long getValue()":{"name":"getValue","returnType":"long","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.lang.String getName()":{"name"
 :"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter getUnderlyingCounter()":{"name":"getUnderlyingCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":[],"exceptions":[]},"void increment(long)":{"name":"increment","returnType":"void","args":["long"],"exceptions":[]},"void setValue(long)":{"name":"setValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getDisplayName()":{"name":"getDisplayName","returnType":"java.lang.String","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"long getCounter()":{"name":"getCounter","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.CombineFileRecordReaderWrapper":{"name":"org.apache.hadoop.mapred.lib.CombineFileRecordReaderWrapper","methods":{"long getPos() throws java.io.IOException"
 :{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.LongSumReducer":{"name":"org.apache.hadoop.mapred.lib.LongSumReducer","methods":{"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.had
 oop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.CombineFileSplit":{"name":"org.apache.hadoop.mapreduce.lib.input.CombineFileSplit","methods":{"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"long getLength(int)":{"name":"getLength","returnType":"long","args":["int"],"exceptions":[]},"long getLength()":{"name":"getLength","returnType":"long","args":[],"exceptions":[]},"[Lorg.apache.hadoop.fs.Path; getPaths()":{"name":"getPaths","returnType":"[Lorg.apache.hadoop.fs.Path;","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.
 DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"long getOffset(int)":{"name":"getOffset","returnType":"long","args":["int"],"exceptions":[]},"org.apache.hadoop.fs.Path getPath(int)":{"name":"getPath","returnType":"org.apache.hadoop.fs.Path","args":["int"],"exceptions":[]},"[J getLengths()":{"name":"getLengths","returnType":"[J","args":[],"exceptions":[]},"[J getStartOffsets()":{"name":"getStartOffsets","returnType":"[J","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"int getNumPaths()":{"name":"getNumPaths","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.db.DBConfiguration":{"name":"org.apache.hadoop.mapreduce.lib.db.DBConfiguration","methods":{"java.lang.String getInputQuery()":{"name":"getInputQuery","re
 turnType":"java.lang.String","args":[],"exceptions":[]},"void setInputClass(java.lang.Class)":{"name":"setInputClass","returnType":"void","args":["java.lang.Class"],"exceptions":[]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setOutputFieldCount(int)":{"name":"setOutputFieldCount","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String getInputTableName()":{"name":"getInputTableName","returnType":"java.lang.String","args":[],"exceptions":[]},"[Ljava.lang.String; getInputFieldNames()":{"name":"getInputFieldNames","returnType":"[Ljava.lang.String;","args":[],"exceptions":[]},"void setOutputTableName(java.lang.String)":{"name":"setOutputTableName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.sql.Connection getConnection() throws java.sql.SQLException, java.lang.ClassNotFoundException":{"name":"getConnection","returnType":"java.sql.Connectio
 n","args":[],"exceptions":["java.sql.SQLException","java.lang.ClassNotFoundException"]},"java.lang.String getInputBoundingQuery()":{"name":"getInputBoundingQuery","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getInputOrderBy()":{"name":"getInputOrderBy","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.Class getInputClass()":{"name":"getInputClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"void setInputTableName(java.lang.String)":{"name":"setInputTableName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setInputCountQuery(java.lang.String)":{"name":"setInputCountQuery","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setInputOrderBy(java.lang.String)":{"name":"setInputOrderBy","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int getOutputFieldCount()":{"name":"getOutputFieldCount","returnType":"int","args":[],"exceptions":[]},"void setInputConditions(ja
 va.lang.String)":{"name":"setInputConditions","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setInputQuery(java.lang.String)":{"name":"setInputQuery","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getInputConditions()":{"name":"getInputConditions","returnType":"java.lang.String","args":[],"exceptions":[]},"void configureDB(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.String, java.lang.String, java.lang.String)":{"name":"configureDB","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String","java.lang.String","java.lang.String","java.lang.String"],"exceptions":[]},"void configureDB(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.String)":{"name":"configureDB","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String","java.lang.String"],"exceptions":[]},"void setInputBoundingQuery(java.lang.String)":{"name":"setInputBoundingQuery","
 returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setInputFieldNames([Ljava.lang.String;)":{"name":"setInputFieldNames","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":[]},"[Ljava.lang.String; getOutputFieldNames()":{"name":"getOutputFieldNames","returnType":"[Ljava.lang.String;","args":[],"exceptions":[]},"java.lang.String getOutputTableName()":{"name":"getOutputTableName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setOutputFieldNames([Ljava.lang.String;)":{"name":"setOutputFieldNames","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":[]},"java.lang.String getInputCountQuery()":{"name":"getInputCountQuery","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.Partitioner":{"name":"org.apache.hadoop.mapred.Partitioner","methods":{"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int
 "],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.output.FilterOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.FilterOutputFormat","methods":{"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOE
 xception":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer":{"name":"org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer","methods":{"void reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJobBase":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJobBase","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],
 "exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader","methods":{"java.lang.Object getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int findSeparator([B, int, int, byte)":{"name":"findSeparator","returnType":"int","args":["[B","int","int","byte"],"exceptions":[]},"java.lang.Class getKeyClass()":{"name":"getKeyClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"org.apache.hadoop.io.Text getCurrentValue()":{"name":"getCurrentValue","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType"
 :"float","args":[],"exceptions":["java.io.IOException"]},"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.Text getCurrentKey()":{"name":"getCurrentKey","returnType":"org.apache.hadoop.io.Text","args":[],"exceptions":[]},"boolean nextKeyValue() throws java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.io.IOException"]},"void setKeyValue(org.apache.hadoop.io.Text, org.apache.hadoop.io.Text, [B, int, int)":{"name":"setKeyValue","returnType":"void","args":["org.apache.hadoop.io.Text","org.apache.hadoop.io.Text","[B","int","int"],"exceptions":[]},"java.lang.Object getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","
 returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader","methods":{}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFilter":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFilter","methods":{"void setFilterClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setFilterClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":
 ["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.chain.ChainMapper":{"name":"org.apache.hadoop.mapreduce.lib.chain.ChainMapper","methods":{"void run(org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"run","returnType":"void","args":["org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void addMapper(org.apache.hadoop.mapreduce.Job, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, java.lang.Class, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"addMapper","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoo
 p.mapred.RecordReader":{"name":"org.apache.hadoop.mapred.RecordReader","methods":{"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"float getProgress() throws java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorBaseDescriptor":{"name":"org.apache.had
 oop.mapred.lib.aggregate.ValueAggregatorBaseDescriptor","methods":{"org.apache.hadoop.mapred.lib.aggregate.ValueAggregator generateValueAggregator(java.lang.String)":{"name":"generateValueAggregator","returnType":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregator","args":["java.lang.String"],"exceptions":[]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"java.util.Map$Entry generateEntry(java.lang.String, java.lang.String, org.apache.hadoop.io.Text)":{"name":"generateEntry","returnType":"java.util.Map$Entry","args":["java.lang.String","java.lang.String","org.apache.hadoop.io.Text"],"exceptions":[]}}},"org.apache.hadoop.mapred.FileOutputFormat":{"name":"org.apache.hadoop.mapred.FileOutputFormat","methods":{"void setOutputPath(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path)":{"name":"setOutputPath","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","o
 rg.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.Path getTaskOutputPath(org.apache.hadoop.mapred.JobConf, java.lang.String) throws java.io.IOException":{"name":"getTaskOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":["java.io.IOException"]},"void setOutputCompressorClass(org.apache.hadoop.mapred.JobConf, java.lang.Class)":{"name":"setOutputCompressorClass","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class"],"exceptions":[]},"java.lang.Class getOutputCompressorClass(org.apache.hadoop.mapred.JobConf, java.lang.Class)":{"name":"getOutputCompressorClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapred.JobConf","java.lang.Class"],"exceptions":[]},"void setCompressOutput(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setCompressOutput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"java.lang.Stri
 ng getUniqueName(org.apache.hadoop.mapred.JobConf, java.lang.String)":{"name":"getUniqueName","returnType":"java.lang.String","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"org.apache.hadoop.fs.Path getOutputPath(org.apache.hadoop.mapred.JobConf)":{"name":"getOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void checkOutputSpecs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf) throws org.apache.hadoop.mapred.InvalidJobConfException, java.io.IOException, org.apache.hadoop.mapred.FileAlreadyExistsException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf"],"exceptions":["org.apache.hadoop.mapred.InvalidJobConfException","java.io.IOException","org.apache.hadoop.mapred.FileAlreadyExistsException"]},"org.apache.hadoop.fs.Path getPathForCustomFile(org.apache.hadoop.mapred.JobConf, java.lang.String)"
 :{"name":"getPathForCustomFile","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"void setWorkOutputPath(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path)":{"name":"setWorkOutputPath","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.fs.Path"],"exceptions":[]},"boolean getCompressOutput(org.apache.hadoop.mapred.JobConf)":{"name":"getCompressOutput","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.fs.Path getWorkOutputPath(org.apache.hadoop.mapred.JobConf)":{"name":"getWorkOutputPath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","retur
 nType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader","methods":{"void accept(org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader$JoinCollector, org.apache.hadoop.io.WritableComparable) throws java.lang.InterruptedException, java.io.IOException":{"name":"accept","returnType":"void","args":["org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader$JoinCollector","org.apache.hadoop.io.WritableComparable"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int id()":{"name":"id","returnType":"int","args":[],"exceptions":[]},"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException
 , java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void skip(org.apache.hadoop.io.WritableComparable) throws java.lang.InterruptedException, java.io.IOException":{"name":"skip","returnType":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int compareTo(org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader"],"exceptions":[]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"org.apache.hadoop.io.WritableComparable key()":{"name":"key","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},
 "float getProgress() throws java.lang.InterruptedException, java.io.IOException":{"name":"getProgress","returnType":"float","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void add(org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader) throws java.lang.In
 terruptedException, java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.WritableComparable getCurrentKey()":{"name":"getCurrentKey","returnType":"org.apache.hadoop.io.WritableComparable","args":[],"exceptions":[]},"java.lang.Object getCurrentKey() throws java.lang.InterruptedException, java.io.IOException":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void key(org.apache.hadoop.io.WritableComparable) throws java.io.IOException":{"name":"key","returnTyp
 e":"void","args":["org.apache.hadoop.io.WritableComparable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.join.JoinRecordReader":{"name":"org.apache.hadoop.mapred.join.JoinRecordReader","methods":{"org.apache.hadoop.mapred.join.TupleWritable createValue()":{"name":"createValue","returnType":"org.apache.hadoop.mapred.join.TupleWritable","args":[],"exceptions":[]},"java.lang.Object createKey()":{"name":"createKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"java.lang.Object createValue()":{"name":"createValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"boolean next(org.apache.hadoop.io.WritableComparable, org.apache.hadoop.mapred.join.TupleWritable) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.mapred.join.TupleWritable"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java
 .lang.Object"],"exceptions":[]},"boolean next(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat","methods":{"void setOutputFormatClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setOutputFormatClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void checkOutputSpecs(org.apac
 he.hadoop.mapreduce.JobContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.join.OuterJoinRecordReader":{"name":"org.apache.hadoop.mapred.join.OuterJoinRecordReader","methods":{}},"org.apache.hadoop.mapred.TextOutputFormat":{"name":"org.apache.hadoop.mapred.TextOutputFormat","methods":{"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf,
  java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.pipes.Submitter":{"name":"org.apache.hadoop.mapred.pipes.Submitter","methods":{"boolean getKeepCommandFile(org.apache.hadoop.mapred.JobConf)":{"name":"getKeepCommandFile","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.RunningJob jobSubmit(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"jobSubmit","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"void setIsJavaMapper(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setIsJavaMapper","returnType":"void","arg
 s":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"void setIsJavaRecordWriter(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setIsJavaRecordWriter","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"void main([Ljava.lang.String;) throws java.lang.Exception":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"boolean getIsJavaRecordWriter(org.apache.hadoop.mapred.JobConf)":{"name":"getIsJavaRecordWriter","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"boolean getIsJavaReducer(org.apache.hadoop.mapred.JobConf)":{"name":"getIsJavaReducer","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"org.apache.hadoop.mapred.RunningJob submitJob(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"submitJob","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.Job
 Conf"],"exceptions":["java.io.IOException"]},"void setIsJavaRecordReader(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setIsJavaRecordReader","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"int run([Ljava.lang.String;) throws java.lang.Exception":{"name":"run","returnType":"int","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"java.lang.String getExecutable(org.apache.hadoop.mapred.JobConf)":{"name":"getExecutable","returnType":"java.lang.String","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void setKeepCommandFile(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setKeepCommandFile","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"void setIsJavaReducer(org.apache.hadoop.mapred.JobConf, boolean)":{"name":"setIsJavaReducer","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","boolean"],"exceptions":[]},"void setExecutable(org.apache.hadoop.m
 apred.JobConf, java.lang.String)":{"name":"setExecutable","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String"],"exceptions":[]},"org.apache.hadoop.mapred.RunningJob runJob(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"runJob","returnType":"org.apache.hadoop.mapred.RunningJob","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"boolean getIsJavaMapper(org.apache.hadoop.mapred.JobConf)":{"name":"getIsJavaMapper","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"boolean getIsJavaRecordReader(org.apache.hadoop.mapred.JobConf)":{"name":"getIsJavaRecordReader","returnType":"boolean","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.CombineFileInputFormat":{"name":"org.apache.hadoop.mapred.lib.CombineFileInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache
 .hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.InputSplit; getSplits(org.apache.hadoop.mapred.JobConf, int) throws java.io.IOException":{"name":"getSplits","returnType":"[Lorg.apache.hadoop.mapred.InputSplit;","args":["org.apache.hadoop.mapred.JobConf","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"
 ]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorDescriptor":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorDescriptor","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapred.ClusterStatus":{"name":"org.apache.hadoop.mapred.ClusterStatus","methods":{"int getTaskTrackers()":{"name":"getTaskTrackers","returnType":"int","args":[],"exceptions":[]},"int getMaxMapTasks()":{"name":"getMaxMapTasks","returnType":"int","args":[],"exceptions":[]},"long getMaxMemory()":{"name":"getMaxMemory","returnType":"long","args":[],"exceptions":[]},"int getMaxReduceTasks()":{"name":"getMaxReduceTasks","returnType":"int","args":[],"exceptions":[]},"java.util.Collection getGraylistedTrackerNames()":{"name":"getGraylistedTrackerNames","returnType":"java.util.Collection","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.Cluster$JobTrackerStatu
 s getJobTrackerStatus()":{"name":"getJobTrackerStatus","returnType":"org.apache.hadoop.mapreduce.Cluster$JobTrackerStatus","args":[],"exceptions":[]},"int getReduceTasks()":{"name":"getReduceTasks","returnType":"int","args":[],"exceptions":[]},"int getGraylistedTrackers()":{"name":"getGraylistedTrackers","returnType":"int","args":[],"exceptions":[]},"long getTTExpiryInterval()":{"name":"getTTExpiryInterval","returnType":"long","args":[],"exceptions":[]},"long getUsedMemory()":{"name":"getUsedMemory","returnType":"long","args":[],"exceptions":[]},"java.util.Collection getActiveTrackerNames()":{"name":"getActiveTrackerNames","returnType":"java.util.Collection","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int getMapTasks()":{"name":"getMapTasks","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobTracker$State getJobTr
 ackerState()":{"name":"getJobTrackerState","returnType":"org.apache.hadoop.mapred.JobTracker$State","args":[],"exceptions":[]},"int getBlacklistedTrackers()":{"name":"getBlacklistedTrackers","returnType":"int","args":[],"exceptions":[]},"java.util.Collection getBlacklistedTrackerNames()":{"name":"getBlacklistedTrackerNames","returnType":"java.util.Collection","args":[],"exceptions":[]},"java.util.Collection getBlackListedTrackersInfo()":{"name":"getBlackListedTrackersInfo","returnType":"java.util.Collection","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"int getNumExcludedNodes()":{"name":"getNumExcludedNodes","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.MapReduceBase":{"name":"org.apache.hadoop.mapred.MapReduceBase","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","retur
 nType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.join.TupleWritable":{"name":"org.apache.hadoop.mapred.join.TupleWritable","methods":{}},"org.apache.hadoop.mapred.ID":{"name":"org.apache.hadoop.mapred.ID","methods":{}},"org.apache.hadoop.mapred.lib.RegexMapper":{"name":"org.apache.hadoop.mapred.lib.RegexMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void map(java.lang.Object, org.apache.hadoop.io.Text, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.
 io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","org.apache.hadoop.io.Text","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueAggregator":{"name":"org.ap
 ache.hadoop.mapred.lib.aggregate.ValueAggregator","methods":{}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorBaseDescriptor":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorBaseDescriptor","methods":{"java.util.ArrayList generateKeyValPairs(java.lang.Object, java.lang.Object)":{"name":"generateKeyValPairs","returnType":"java.util.ArrayList","args":["java.lang.Object","java.lang.Object"],"exceptions":[]},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregator generateValueAggregator(java.lang.String, long)":{"name":"generateValueAggregator","returnType":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregator","args":["java.lang.String","long"],"exceptions":[]},"void configure(org.apache.hadoop.conf.Configuration)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"java.util.Map$Entry generateEntry(java.lang.String, java.lang.String, org.apache.hadoop.io.Text)":{"name":"generateEntry","returnTy
 pe":"java.util.Map$Entry","args":["java.lang.String","java.lang.String","org.apache.hadoop.io.Text"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.DoubleValueSum":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.DoubleValueSum","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"void addNextValue(double)":{"name":"addNextValue","returnType":"void","args":["double"],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"double getSum()":{"name":"getSum","returnType":"double","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.LongValueMin":{"name":"org.apache.hadoop.mapreduce.lib.aggr
 egate.LongValueMin","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"long getVal()":{"name":"getVal","returnType":"long","args":[],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"void addNextValue(long)":{"name":"addNextValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.InputSplit":{"name":"org.apache.hadoop.mapreduce.InputSplit","methods":{"[Lorg.apache.hadoop.mapred.SplitLocationInfo; getLocationInfo() throws java.io.IOException":{"name":"getLocationInfo","returnType":"[Lorg.apache.hadoop.mapred.SplitLocationInfo;","args":[],"exceptions":["java.io.IOException"]},"[Ljava
 .lang.String; getLocations() throws java.lang.InterruptedException, java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"long getLength() throws java.lang.InterruptedException, java.io.IOException":{"name":"getLength","returnType":"long","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.TotalOrderPartitioner":{"name":"org.apache.hadoop.mapred.lib.TotalOrderPartitioner","methods":{"void setPartitionFile(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path)":{"name":"setPartitionFile","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","org.apache.hadoop.fs.Path"],"exceptions":[]},"java.lang.String getPartitionFile(org.apache.hadoop.mapred.JobConf)":{"name":"getPartitionFile","returnType":"java.lang.String","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]},"void configure(org.apache.hadoo
 p.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.Counter":{"name":"org.apache.hadoop.mapreduce.Counter","methods":{"void setDisplayName(java.lang.String)":{"name":"setDisplayName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"long getValue()":{"name":"getValue","returnType":"long","args":[],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.Counter getUnderlyingCounter()":{"name":"getUnderlyingCounter","returnType":"org.apache.hadoop.mapreduce.Counter","args":[],"exceptions":[]},"void increment(long)":{"name":"increment","returnType":"void","args":["long"],"exceptions":[]},"void setValue(long)":{"name":"setValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getDisplayName()":{"name":"getDisplayName","returnType":"java.lang.String","args":
 [],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.db.DBInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.db.DBInputFormat","methods":{"org.apache.hadoop.mapreduce.lib.db.DBConfiguration getDBConf()":{"name":"getDBConf","returnType":"org.apache.hadoop.mapreduce.lib.db.DBConfiguration","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Configuration","args":[],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setC
 onf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"java.sql.Connection createConnection()":{"name":"createConnection","returnType":"java.sql.Connection","args":[],"exceptions":[]},"void setInput(org.apache.hadoop.mapreduce.Job, java.lang.Class, java.lang.String, java.lang.String, java.lang.String, [Ljava.lang.String;)":{"name":"setInput","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class","java.lang.String","java.lang.String","java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"java.sql.Connection getConnection()":{"name":"getConnection","returnType":"java.sql.Connection","args":[],"exceptions":[]},"java.lang.String getDBProductName()":{"name":"getDBProductName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setInput(org.apache.hadoop.mapreduce.Job, java.lang.Class, java.lang.String, java.lang.String)":{"name":"setInput","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lan
 g.Class","java.lang.String","java.lang.String"],"exceptions":[]},"java.util.List getSplits(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"getSplits","returnType":"java.util.List","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.join.StreamBackedIterator":{"name":"org.apache.hadoop.mapreduce.lib.join.StreamBackedIterator","methods":{"void add(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean next(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"boolean replay(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"replay","returnType":"boo
 lean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.join.CompositeInputFormat":{"name":"org.apache.hadoop.mapred.join.CompositeInputFormat","methods":{"org.apache.hadoop.mapred.join.ComposableRecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.join.ComposableRecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"java.lang.String compose(java.lang.Str
 ing, java.lang.Class, [Lorg.apache.hadoop.fs.Path;)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.String","java.lang.Class","[Lorg.apache.hadoop.fs.Path;"],"exceptions":[]},"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void setFormat(org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":"setFormat","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"java.lang.String compose(java.lang.Class, java.lang.String)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.Class","java.lang.String"],"exceptions":[]},"
 java.lang.String compose(java.lang.String, java.lang.Class, [Ljava.lang.String;)":{"name":"compose","returnType":"java.lang.String","args":["java.lang.String","java.lang.Class","[Ljava.lang.String;"],"exceptions":[]},"[Lorg.apache.hadoop.mapred.InputSplit; getSplits(org.apache.hadoop.mapred.JobConf, int) throws java.io.IOException":{"name":"getSplits","returnType":"[Lorg.apache.hadoop.mapred.InputSplit;","args":["org.apache.hadoop.mapred.JobConf","int"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.Partitioner":{"name":"org.apache.hadoop.mapreduce.Partitioner","methods":{"int getPartition(java.lang.Object, java.lang.Object, int)":{"name":"getPartition","returnType":"int","args":["java.lang.Object","java.lang.Object","int"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.output.NullOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.NullOutputFormat","methods":{"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext)":{"name":"checkOutputSpe
 cs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"org.apache.hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext)":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":[]},"org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext)":{"name":"getOutputCommitter","returnType":"org.apache.hadoop.mapreduce.OutputCommitter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer":{"name":"org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer","methods":{"void reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.lang.Iter
 able","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.TaskCompletionEvent$Status":{"name":"org.apache.hadoop.mapred.TaskCompletionEvent$Status","methods":{"[Lorg.apache.hadoop.mapred.TaskCompletionEvent$Status; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.mapred.TaskCompletionEvent$Status;","args":[],"exceptions":[]},"org.apache.hadoop.mapred.TaskCompletionEvent$Status valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.mapred.TaskCompletionEvent$Status","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.mapred.JobContext":{"name":"org.apache.hadoop.mapred.JobContext","methods":{"org.apache.hadoop.util.Progressable getProgressible()":{"name":"getProgressible","returnType":"org.apache.hadoop.util.Progressable","args":[],"exceptions":[]},"org.apache.hadoop.mapred.JobConf getJobConf()":{"name":"getJobConf","returnType":"org.apache.had
 oop.mapred.JobConf","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.OutputCommitter":{"name":"org.apache.hadoop.mapreduce.OutputCommitter","methods":{"boolean isCommitJobRepeatable(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"isCommitJobRepeatable","returnType":"boolean","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void commitJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"commitJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void abortTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"abortTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"isRecoverySupported","returnType":"boolean","args
 ":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"boolean needsTaskCommit(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"needsTaskCommit","returnType":"boolean","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void setupJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"setupJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void recoverTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"recoverTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void cleanupJob(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"cleanupJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void abortJ
 ob(org.apache.hadoop.mapreduce.JobContext, org.apache.hadoop.mapreduce.JobStatus$State) throws java.io.IOException":{"name":"abortJob","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext","org.apache.hadoop.mapreduce.JobStatus$State"],"exceptions":["java.io.IOException"]},"void setupTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"setupTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"void commitTask(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"commitTask","returnType":"void","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"boolean isRecoverySupported()":{"name":"isRecoverySupported","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.aggregate.ValueHistogram":{"name":"org.apache.hadoop.mapred.lib.aggregate.ValueHistogram","methods":{}},"o
 rg.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.CombineSequenceFileInputFormat":{"name":"org.apache.hadoop.mapred.lib.CombineSequenceFileInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputS
 plit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.InputSplit":{"name":"org.apache.hadoop.mapred.InputSplit","methods":{"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"long getLength() throws java.io.IOException":{"name":"getLength","returnType":"long","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.aggregate.LongValueSum":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.LongValueSum","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"java.util.ArrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"void addNextValue(long)":{"name":"addNextValue","returnType":"void","args":["long"],"exceptions":[]},"java.lang.String getReport()":{"
 name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]},"long getSum()":{"name":"getSum","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileRecordReader","methods":{"void initialize(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.lang.InterruptedException, java.io.IOException":{"name":"initialize","returnType":"void","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentValue()":{"name":"getCurrentValue","returnType":"java.lang.Object","args":[],"exceptions":[]},"float getProgress() throws java.io.IOException":{"name":"getProg
 ress","returnType":"float","args":[],"exceptions":["java.io.IOException"]},"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.lang.Object getCurrentKey()":{"name":"getCurrentKey","returnType":"java.lang.Object","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.MultipleTextOutputFormat":{"name":"org.apache.hadoop.mapred.lib.MultipleTextOutputFormat","methods":{}},"org.apache.hadoop.mapreduce.lib.aggregate.StringValueMin":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.StringValueMin","methods":{"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"java.lang.String getVal()":{"name":"getVal","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.A
 rrayList getCombinerOutput()":{"name":"getCombinerOutput","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.lang.String getReport()":{"name":"getReport","returnType":"java.lang.String","args":[],"exceptions":[]},"void addNextValue(java.lang.Object)":{"name":"addNextValue","returnType":"void","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.OverrideRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.OverrideRecordReader","methods":{"org.apache.hadoop.io.Writable createValue()":{"name":"createValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorJob":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorJob","methods":{"org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl createValueAggregatorJobs([Ljava.lang.String;) throws java.io.IOException":{"name":"createValueAggregatorJobs","returnType":"org.apache.hadoop.mapreduce.lib.j
 obcontrol.JobControl","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl createValueAggregatorJobs([Ljava.lang.String;, [Ljava.lang.Class;) throws java.io.IOException":{"name":"createValueAggregatorJobs","returnType":"org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl","args":["[Ljava.lang.String;","[Ljava.lang.Class;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.conf.Configuration setAggregatorDescriptors([Ljava.lang.Class;)":{"name":"setAggregatorDescriptors","returnType":"org.apache.hadoop.conf.Configuration","args":["[Ljava.lang.Class;"],"exceptions":[]},"void main([Ljava.lang.String;) throws java.lang.InterruptedException, java.io.IOException, java.lang.ClassNotFoundException":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.InterruptedException","java.io.IOException","java.lang.ClassNotFoundException"]},"org.apache.hadoop.mapreduce.Job createValueA
 ggregatorJob([Ljava.lang.String;, [Ljava.lang.Class;) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapreduce.Job","args":["[Ljava.lang.String;","[Ljava.lang.Class;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.Job createValueAggregatorJob(org.apache.hadoop.conf.Configuration, [Ljava.lang.String;) throws java.io.IOException":{"name":"createValueAggregatorJob","returnType":"org.apache.hadoop.mapreduce.Job","args":["org.apache.hadoop.conf.Configuration","[Ljava.lang.String;"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.LongValueMin":{"name":"org.apache.hadoop.mapred.lib.aggregate.LongValueMin","methods":{}},"org.apache.hadoop.mapred.lib.aggregate.LongValueSum":{"name":"org.apache.hadoop.mapred.lib.aggregate.LongValueSum","methods":{}},"org.apache.hadoop.mapred.JobID":{"name":"org.apache.hadoop.mapred.JobID","methods":{"java.lang.String getJobIDsPattern(java.lang.String, java.lan
 g.Integer)":{"name":"getJobIDsPattern","returnType":"java.lang.String","args":["java.lang.String","java.lang.Integer"],"exceptions":[]},"org.apache.hadoop.mapred.JobID forName(java.lang.String) throws java.lang.IllegalArgumentException":{"name":"forName","returnType":"org.apache.hadoop.mapred.JobID","args":["java.lang.String"],"exceptions":["java.lang.IllegalArgumentException"]},"org.apache.hadoop.mapred.JobID read(java.io.DataInput) throws java.io.IOException":{"name":"read","returnType":"org.apache.hadoop.mapred.JobID","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.JobID downgrade(org.apache.hadoop.mapreduce.JobID)":{"name":"downgrade","returnType":"org.apache.hadoop.mapred.JobID","args":["org.apache.hadoop.mapreduce.JobID"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.FileSplit":{"name":"org.apache.hadoop.mapreduce.lib.input.FileSplit","methods":{"long getStart()":{"name":"getStart","returnType":"long","args":[],"exceptio
 ns":[]},"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.mapred.SplitLocationInfo; getLocationInfo() throws java.io.IOException":{"name":"getLocationInfo","returnType":"[Lorg.apache.hadoop.mapred.SplitLocationInfo;","args":[],"exceptions":["java.io.IOException"]},"long getLength()":{"name":"getLength","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getPath()":{"name":"getPath","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","arg
 s":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionReducer":{"name":"org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionReducer","methods":{"void reduce(java.lang.Object, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void reduce(org.apache.hadoop.io.Text, java.lang.Iterable, org.apache.hadoop.mapreduce.Reducer$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"reduce","returnType":"void","args":["org.apache.hadoop.io.Text","java.lang.Iterable","org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setup(org.apache.hadoop.mapreduce.Reducer
 $Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"setup","returnType":"void","args":["org.apache.hadoop.mapreduce.Reducer$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.aggregate.StringValueMax":{"name":"org.apache.hadoop.mapred.lib.aggregate.StringValueMax","methods":{}},"org.apache.hadoop.mapreduce.lib.output.SequenceFileAsBinaryOutputFormat":{"name":"org.apache.hadoop.mapreduce.lib.output.SequenceFileAsBinaryOutputFormat","methods":{"void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext) throws java.io.IOException":{"name":"checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":["java.io.IOException"]},"void setSequenceFileOutputKeyClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setSequenceFileOutputKeyClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"org.apache
 .hadoop.mapreduce.RecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapreduce.RecordWriter","args":["org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]},"java.lang.Class getSequenceFileOutputValueClass(org.apache.hadoop.mapreduce.JobContext)":{"name":"getSequenceFileOutputValueClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void setSequenceFileOutputValueClass(org.apache.hadoop.mapreduce.Job, java.lang.Class)":{"name":"setSequenceFileOutputValueClass","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.Class"],"exceptions":[]},"java.lang.Class getSequenceFileOutputKeyClass(org.apache.hadoop.mapreduce.JobContext)":{"name":"getSequenceFileOutputKeyClass","returnType":"java.lang.Class","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]}}},"org.apac
 he.hadoop.mapred.Reducer":{"name":"org.apache.hadoop.mapred.Reducer","methods":{"void reduce(java.lang.Object, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.jobcontrol.JobControl":{"name":"org.apache.hadoop.mapred.jobcontrol.JobControl","methods":{"java.util.ArrayList getReadyJobs()":{"name":"getReadyJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.util.ArrayList getFailedJobs()":{"name":"getFailedJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.util.ArrayList getSuccessfulJobs()":{"name":"getSuccessfulJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.util.ArrayList getWaitingJobs()":{"name":"getWaitingJob
 s","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"java.util.ArrayList getRunningJobs()":{"name":"getRunningJobs","returnType":"java.util.ArrayList","args":[],"exceptions":[]},"void addJobs(java.util.Collection)":{"name":"addJobs","returnType":"void","args":["java.util.Collection"],"exceptions":[]},"int getState()":{"name":"getState","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.ResetableIterator":{"name":"org.apache.hadoop.mapreduce.lib.join.ResetableIterator","methods":{"void add(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean next(org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"next","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"boolean replay
 (org.apache.hadoop.io.Writable) throws java.io.IOException":{"name":"replay","returnType":"boolean","args":["org.apache.hadoop.io.Writable"],"exceptions":["java.io.IOException"]},"boolean hasNext()":{"name":"hasNext","returnType":"boolean","args":[],"exceptions":[]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorMapper":{"name":"org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorMapper","methods":{"void map(java.lang.Object, java.lang.Object, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"map","returnType":"void","args":["java.lang.Object","java.lang.Object","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void map(org.apache.hadoo
 p.io.WritableComparable, org.apache.hadoop.io.Writable, org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"map","returnType":"void","args":["org.apache.hadoop.io.WritableComparable","org.apache.hadoop.io.Writable","org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void setup(org.apache.hadoop.mapreduce.Mapper$Context) throws java.lang.InterruptedException, java.io.IOException":{"name":"setup","returnType":"void","args":["org.apache.hadoop.mapreduce.Mapper$Context"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]}}},"org.apache.hadoop.mapred.join.CompositeInputSplit":{"name":"org.apache.hadoop.mapred.join.CompositeInputSplit","methods":{"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; getLocation
 (int) throws java.io.IOException":{"name":"getLocation","returnType":"[Ljava.lang.String;","args":["int"],"exceptions":["java.io.IOException"]},"void add(org.apache.hadoop.mapred.InputSplit) throws java.io.IOException":{"name":"add","returnType":"void","args":["org.apache.hadoop.mapred.InputSplit"],"exceptions":["java.io.IOException"]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"long getLength() throws java.io.IOException":{"name":"getLength","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapred.InputSplit get(int)":{"name":"get","returnType":"org.apache.hadoop.mapred.InputSplit","args":["int"],"exceptions":[]},"long getLength(int) throws java.io.IOException":{"name":"getLength","returnType":"long","args":["int"],"exceptions":["java.io.IOException"]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"re
 adFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.BinaryPartitioner":{"name":"org.apache.hadoop.mapred.lib.BinaryPartitioner","methods":{"void configure(org.apache.hadoop.mapred.JobConf)":{"name":"configure","returnType":"void","args":["org.apache.hadoop.mapred.JobConf"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.partition.KeyFieldBasedComparator":{"name":"org.apache.hadoop.mapreduce.lib.partition.KeyFieldBasedComparator","methods":{"void setKeyFieldComparatorOptions(org.apache.hadoop.mapreduce.Job, java.lang.String)":{"name":"setKeyFieldComparatorOptions","returnType":"void","args":["org.apache.hadoop.mapreduce.Job","java.lang.String"],"exceptions":[]},"int compare([B, int, int, [B, int, int)":{"name":"compare","returnType":"int","args":["[B","int","int","[B","int","int"],"exceptions":[]},"org.apache.hadoop.conf.Configuration getConf()":{"name":"getConf","returnType":"org.apache.hadoop.conf.Conf
 iguration","args":[],"exceptions":[]},"java.lang.String getKeyFieldComparatorOption(org.apache.hadoop.mapreduce.JobContext)":{"name":"getKeyFieldComparatorOption","returnType":"java.lang.String","args":["org.apache.hadoop.mapreduce.JobContext"],"exceptions":[]},"void setConf(org.apache.hadoop.conf.Configuration)":{"name":"setConf","returnType":"void","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextInputFormat":{"name":"org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextInputFormat","methods":{"org.apache.hadoop.mapreduce.RecordReader createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext) throws java.io.IOException":{"name":"createRecordReader","returnType":"org.apache.hadoop.mapreduce.RecordReader","args":["org.apache.hadoop.mapreduce.InputSplit","org.apache.hadoop.mapreduce.TaskAttemptContext"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoo
 p.mapred.MultiFileSplit":{"name":"org.apache.hadoop.mapred.MultiFileSplit","methods":{"[Ljava.lang.String; getLocations() throws java.io.IOException":{"name":"getLocations","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.mapred.JobQueueInfo":{"name":"org.apache.hadoop.mapred.JobQueueInfo","methods":{"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setJobStatuses([Lorg.apache.hadoop.mapreduce.JobStatus;)":{"name":"setJobStatuses","returnType":"void","args":["[Lorg.apache.hadoop.mapreduce.JobStatus;"],"exceptions":[]},"void setChildren(java.util.List)":{"name":"setChildren","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.List getChildren()":{"name":"getChildren","returnType":"java.util.List","args":[],"exceptions":[]
 },"void setQueueState(java.lang.String)":{"name":"setQueueState","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getQueueState()":{"name":"getQueueState","returnType":"java.lang.String","args":[],"exceptions":[]},"void setSchedulingInfo(java.lang.String)":{"name":"setSchedulingInfo","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setProperties(java.util.Properties)":{"name":"setProperties","returnType":"void","args":["java.util.Properties"],"exceptions":[]}}},"org.apache.hadoop.mapred.lib.db.DBOutputFormat":{"name":"org.apache.hadoop.mapred.lib.db.DBOutputFormat","methods":{"void setOutput(org.apache.hadoop.mapred.JobConf, java.lang.String, [Ljava.lang.String;)":{"name":"setOutput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"void checkOutputSpecs(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf) throws java.io.IOException":{"name":
 "checkOutputSpecs","returnType":"void","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf"],"exceptions":["java.io.IOException"]},"void setOutput(org.apache.hadoop.mapred.JobConf, java.lang.String, int)":{"name":"setOutput","returnType":"void","args":["org.apache.hadoop.mapred.JobConf","java.lang.String","int"],"exceptions":[]},"org.apache.hadoop.mapred.RecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.mapred.JobConf, java.lang.String, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"getRecordWriter","returnType":"org.apache.hadoop.mapred.RecordWriter","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.mapred.JobConf","java.lang.String","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.RecordWriter":{"name":"org.apache.hadoop.mapred.RecordWriter","methods":{"void close(org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"
 close","returnType":"void","args":["org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]},"void write(java.lang.Object, java.lang.Object) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.lang.Object","java.lang.Object"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.FileAlreadyExistsException":{"name":"org.apache.hadoop.mapred.FileAlreadyExistsException","methods":{}},"org.apache.hadoop.mapreduce.lib.join.JoinRecordReader":{"name":"org.apache.hadoop.mapreduce.lib.join.JoinRecordReader","methods":{"boolean nextKeyValue() throws java.lang.InterruptedException, java.io.IOException":{"name":"nextKeyValue","returnType":"boolean","args":[],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"org.apache.hadoop.mapreduce.lib.join.TupleWritable createValue()":{"name":"createValue","returnType":"org.apache.hadoop.mapreduce.lib.join.TupleWritable","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable cre
 ateValue()":{"name":"createValue","returnType":"org.apache.hadoop.io.Writable","args":[],"exceptions":[]}}},"org.apache.hadoop.mapreduce.lib.join.TupleWritable":{"name":"org.apache.hadoop.mapreduce.lib.join.TupleWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int size()":{"name":"size","returnType":"int","args":[],"exceptions":[]},"java.util.Iterator iterator()":{"name":"iterator","returnType":"java.util.Iterator","args":[],"exceptions":[]},"org.apache.hadoop.io.Writable get(int)":{"name":"get","returnType":"org.apache.hadoop.io.Writable","args":["int"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lan
 g.Object"],"exceptions":[]},"boolean has(int)":{"name":"has","returnType":"boolean","args":["int"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.CombineTextInputFormat":{"name":"org.apache.hadoop.mapred.lib.CombineTextInputFormat","methods":{"org.apache.hadoop.mapred.RecordReader getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"getRecordReader","returnType":"org.apache.hadoop.mapred.RecordReader","args":["org.apache.hadoop.mapred.InputSplit","org.apache.hadoop.mapred.JobConf","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapred.lib.IdentityReducer":{"name":"org.apache.hadoop.mapred.lib.IdentityReducer","methods":{"void reduce(java.lang.Object, java.util.
 Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) throws java.io.IOException":{"name":"reduce","returnType":"void","args":["java.lang.Object","java.util.Iterator","org.apache.hadoop.mapred.OutputCollector","org.apache.hadoop.mapred.Reporter"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.mapreduce.TaskID":{"name":"org.apache.hadoop.mapreduce.TaskID","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.mapreduce.ID)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.mapreduce.ID"],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskType getTaskType(char)":{"name":"getTaskType","returnType":"org.apache.hadoop.mapreduce.TaskType","args":["char"],"exceptions":[]},"boolean isMap()":{"name":"isMap","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getAllTaskTypes()":{"name":"getAllTaskTypes","returnType":"java.lang.String","args":[],"ex
 ceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"char getRepresentingCharacter(org.apache.hadoop.mapreduce.TaskType)":{"name":"getRepresentingCharacter","returnType":"char","args":["org.apache.hadoop.mapreduce.TaskType"],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskID forName(java.lang.String) throws java.lang.IllegalArgumentException":{"name":"forName","returnType":"org.apache.hadoop.mapreduce.TaskID","args":["java.lang.String"],"exceptions":["java.lang.IllegalArgumentException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.mapreduce.
 JobID getJobID()":{"name":"getJobID","returnType":"org.apache.hadoop.mapreduce.JobID","args":[],"exceptions":[]},"org.apache.hadoop.mapreduce.TaskType getTaskType()":{"name":"getTaskType","returnType":"org.apache.hadoop.mapreduce.TaskType","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.filecache.DistributedCache":{"name":"org.apache.hadoop.filecache.DistributedCache","methods":{"void setLocalArchives(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"setLocalArchives","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"long getTimestamp(org.apache.hadoop.conf.Configuration, java.net.URI) throws java.io.IOException":{"name":"getTimestamp","returnType":"long","args":["org.apache.hadoop.conf.Configuration","java.net.URI"],"exceptions":["java.io.IOEx
 ception"]},"void setFileTimestamps(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"setFileTimestamps

<TRUNCATED>

[08/50] [abbrv] bigtop git commit: ODPI-184 Pulled my head out. I had written the tests so that they were running on the client rather than connecting to the server. Fixed that so they will now talk to the metastore server.

Posted by rv...@apache.org.
ODPI-184 Pulled my head out.  I had written the tests so that they were running on the client rather than connecting to the server.  Fixed that so they will now talk to the metastore server.

(cherry picked from commit 76dc0e3854a36bc41bd9f3298298e0db9393de54)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/46cd194b
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/46cd194b
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/46cd194b

Branch: refs/heads/master
Commit: 46cd194bac9fd6fe9f8259023c80efa87413a9c4
Parents: 166e9f7
Author: Alan Gates <ga...@hortonworks.com>
Authored: Tue Nov 1 16:38:14 2016 -0700
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:11 2017 -0700

----------------------------------------------------------------------
 .../org/odpi/specs/runtime/hive/TestThrift.java | 129 ++++++++-----------
 1 file changed, 57 insertions(+), 72 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/46cd194b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
index 1aede96..5eaab95 100644
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
+++ b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
@@ -17,29 +17,27 @@
  */
 package org.odpi.specs.runtime.hive;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.TableType;
-import org.apache.hadoop.hive.metastore.api.AddPartitionsRequest;
 import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.DropPartitionsRequest;
-import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.RequestPartsSpec;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
 import org.apache.thrift.TException;
 import org.junit.Assert;
 import org.junit.Assume;
 import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -47,7 +45,9 @@ import java.util.Random;
 
 public class TestThrift {
 
-  private static ThriftHiveMetastore.Iface client = null;
+  private static final Log LOG = LogFactory.getLog(JdbcConnector.class.getName());
+
+  private static IMetaStoreClient client = null;
   private static HiveConf conf;
 
   private Random rand;
@@ -58,7 +58,8 @@ public class TestThrift {
       String url = JdbcConnector.getProperty(JdbcConnector.METASTORE_URL, "Thrift metastore URL");
       conf = new HiveConf();
       conf.setVar(HiveConf.ConfVars.METASTOREURIS, url);
-      client = new HiveMetaStore.HMSHandler("ODPi test", conf, true);
+      LOG.info("Set to test against metastore at " + url);
+      client = new HiveMetaStoreClient(conf);
     }
   }
 
@@ -72,22 +73,20 @@ public class TestThrift {
   public void db() throws TException {
     final String dbName = "odpi_thrift_db_" + rand.nextInt(Integer.MAX_VALUE);
 
-    String location = JdbcConnector.getProperty(JdbcConnector.LOCATION, " HDFS location we can " +
-        "write to");
-    Database db = new Database(dbName, "a db", location, new HashMap<String, String>());
-    client.create_database(db);
-    db = client.get_database(dbName);
+    Database db = new Database(dbName, "a db", null, new HashMap<String, String>());
+    client.createDatabase(db);
+    db = client.getDatabase(dbName);
     Assert.assertNotNull(db);
     db = new Database(db);
     db.getParameters().put("a", "b");
-    client.alter_database(dbName, db);
-    List<String> alldbs = client.get_databases("odpi_*");
+    client.alterDatabase(dbName, db);
+    List<String> alldbs = client.getDatabases("odpi_*");
     Assert.assertNotNull(alldbs);
     Assert.assertTrue(alldbs.size() > 0);
-    alldbs = client.get_all_databases();
+    alldbs = client.getAllDatabases();
     Assert.assertNotNull(alldbs);
     Assert.assertTrue(alldbs.size() > 0);
-    client.drop_database(dbName, true, true);
+    client.dropDatabase(dbName, true, true);
   }
 
   // Not testing types calls, as they aren't used AFAIK
@@ -95,138 +94,129 @@ public class TestThrift {
   @Test
   public void nonPartitionedTable() throws TException {
     final String tableName = "odpi_thrift_table_" + rand.nextInt(Integer.MAX_VALUE);
-    String location = JdbcConnector.getProperty(JdbcConnector.LOCATION, " HDFS location we can " +
-        "write to");
 
     // I don't test every operation related to tables, but only those that are frequently used.
     SerDeInfo serde = new SerDeInfo("default_serde",
         conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
     FieldSchema fs = new FieldSchema("a", "int", "no comment");
-    StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), location,
+    StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), null,
         conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT),
         conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), false, 0, serde, null, null,
         new HashMap<String, String>());
     Table table = new Table(tableName, "default", "me", 0, 0, 0, sd, null,
         new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
-    EnvironmentContext envContext = new EnvironmentContext(new HashMap<String, String>());
-    client.create_table_with_environment_context(table, envContext);
+    client.createTable(table);
 
-    table = client.get_table("default", tableName);
+    table = client.getTable("default", tableName);
     Assert.assertNotNull(table);
 
     List<Table> tables =
-        client.get_table_objects_by_name("default", Collections.singletonList(tableName));
+        client.getTableObjectsByName("default", Collections.singletonList(tableName));
     Assert.assertNotNull(tables);
     Assert.assertEquals(1, tables.size());
 
-    List<String> tableNames = client.get_tables("default", "odpi_*");
+    List<String> tableNames = client.getTables("default", "odpi_*");
     Assert.assertNotNull(tableNames);
     Assert.assertTrue(tableNames.size() >= 1);
 
-    tableNames = client.get_all_tables("default");
+    tableNames = client.getAllTables("default");
     Assert.assertNotNull(tableNames);
     Assert.assertTrue(tableNames.size() >= 1);
 
-    List<FieldSchema> cols = client.get_fields("default", tableName);
+    List<FieldSchema> cols = client.getFields("default", tableName);
     Assert.assertNotNull(cols);
     Assert.assertEquals(1, cols.size());
 
-    cols = client.get_schema_with_environment_context("default", tableName, envContext);
+    cols = client.getSchema("default", tableName);
     Assert.assertNotNull(cols);
     Assert.assertEquals(1, cols.size());
 
     table = new Table(table);
     table.getParameters().put("a", "b");
-    client.alter_table_with_cascade("default", tableName, table, false);
+    client.alter_table("default", tableName, table, false);
 
     table.getParameters().put("c", "d");
-    client.alter_table_with_environment_context("default", tableName, table, envContext);
+    client.alter_table("default", tableName, table);
 
-    client.drop_table_with_environment_context("default", tableName, true, envContext);
+    client.dropTable("default", tableName, true, false);
   }
 
   @Test
   public void partitionedTable() throws TException {
     final String tableName = "odpi_thrift_partitioned_table_" + rand.nextInt(Integer.MAX_VALUE);
-    String location = JdbcConnector.getProperty(JdbcConnector.LOCATION, " HDFS location we can " +
-        "write to");
 
     // I don't test every operation related to tables, but only those that are frequently used.
     SerDeInfo serde = new SerDeInfo("default_serde",
         conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
     FieldSchema fs = new FieldSchema("a", "int", "no comment");
-    StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), location,
+    StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), null,
         conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT),
         conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), false, 0, serde, null, null,
         new HashMap<String, String>());
     FieldSchema pk = new FieldSchema("pk", "string", "");
     Table table = new Table(tableName, "default", "me", 0, 0, 0, sd, Collections.singletonList(pk),
         new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
-    EnvironmentContext envContext = new EnvironmentContext(new HashMap<String, String>());
-    client.create_table_with_environment_context(table, envContext);
+    client.createTable(table);
 
-    sd = new StorageDescriptor(Collections.singletonList(fs), location + "/x",
+    sd = new StorageDescriptor(Collections.singletonList(fs), null,
         conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
         conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
         new HashMap<String, String>());
     Partition partition = new Partition(Collections.singletonList("x"), "default", tableName, 0,
         0, sd, new HashMap<String, String>());
-    client.add_partition_with_environment_context(partition, envContext);
+    client.add_partition(partition);
 
-    sd = new StorageDescriptor(Collections.singletonList(fs), location + "/y",
+    List<Partition> partitions = new ArrayList<>(2);
+    sd = new StorageDescriptor(Collections.singletonList(fs), null,
         conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
         conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
         new HashMap<String, String>());
-    partition = new Partition(Collections.singletonList("y"), "default", tableName, 0,
-        0, sd, new HashMap<String, String>());
-    client.add_partitions(Collections.singletonList(partition));
-
-    sd = new StorageDescriptor(Collections.singletonList(fs), location + "/z",
+    partitions.add(new Partition(Collections.singletonList("y"), "default", tableName, 0,
+        0, sd, new HashMap<String, String>()));
+    sd = new StorageDescriptor(Collections.singletonList(fs), null,
         conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
         conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
         new HashMap<String, String>());
-    partition = new Partition(Collections.singletonList("z"), "default", tableName, 0,
-        0, sd, new HashMap<String, String>());
-    AddPartitionsRequest rqst = new AddPartitionsRequest("default", tableName,
-        Collections.singletonList(partition), true);
-    client.add_partitions_req(rqst);
+    partitions.add(new Partition(Collections.singletonList("z"), "default", tableName, 0,
+        0, sd, new HashMap<String, String>()));
+    client.add_partitions(partitions);
 
-    List<Partition> parts = client.get_partitions("default", tableName, (short)-1);
+    List<Partition> parts = client.listPartitions("default", tableName, (short)-1);
     Assert.assertNotNull(parts);
     Assert.assertEquals(3, parts.size());
 
-    parts = client.get_partitions_with_auth("default", tableName, (short)-1, "me",
+    parts = client.listPartitions("default", tableName, Collections.singletonList("x"),
+        (short)-1);
+    Assert.assertNotNull(parts);
+    Assert.assertEquals(1, parts.size());
+
+    parts = client.listPartitionsWithAuthInfo("default", tableName, (short)-1, "me",
         Collections.<String>emptyList());
     Assert.assertNotNull(parts);
     Assert.assertEquals(3, parts.size());
 
-    parts = client.get_partitions_ps("default", tableName, Collections.singletonList("x"),
-        (short)-1);
-    Assert.assertNotNull(parts);
-    Assert.assertEquals(1, parts.size());
+    List<String> partNames = client.listPartitionNames("default", tableName, (short)-1);
+    Assert.assertNotNull(partNames);
+    Assert.assertEquals(3, partNames.size());
 
-    parts = client.get_partitions_by_filter("default", tableName, "pk = \"x\"", (short)-1);
+    parts = client.listPartitionsByFilter("default", tableName, "pk = \"x\"", (short)-1);
     Assert.assertNotNull(parts);
     Assert.assertEquals(1, parts.size());
 
-    parts = client.get_partitions_by_names("default", tableName, Collections.singletonList("pk=x"));
+    parts = client.getPartitionsByNames("default", tableName, Collections.singletonList("pk=x"));
     Assert.assertNotNull(parts);
     Assert.assertEquals(1, parts.size());
 
-    partition = client.get_partition("default", tableName, Collections.singletonList("x"));
+    partition = client.getPartition("default", tableName, Collections.singletonList("x"));
     Assert.assertNotNull(partition);
 
-    partition = client.get_partition_by_name("default", tableName, "pk=x");
+    partition = client.getPartition("default", tableName, "pk=x");
     Assert.assertNotNull(partition);
 
-    partition = client.get_partition_with_auth("default", tableName, Collections.singletonList("x"),
+    partition = client.getPartitionWithAuthInfo("default", tableName, Collections.singletonList("x"),
         "me", Collections.<String>emptyList());
     Assert.assertNotNull(partition);
 
-    List<String> partitionNames = client.get_partition_names("default", tableName, (short)-1);
-    Assert.assertNotNull(partitionNames);
-    Assert.assertEquals(3, partitionNames.size());
-
     partition = new Partition(partition);
     partition.getParameters().put("a", "b");
     client.alter_partition("default", tableName, partition);
@@ -240,13 +230,8 @@ public class TestThrift {
 
     // Not testing partition marking events, not used by anyone but Hive replication AFAIK
 
-    client.drop_partition_by_name_with_environment_context("default", tableName, "pk=x", true,
-        envContext);
-    client.drop_partition_with_environment_context("default", tableName,
-        Collections.singletonList("y"), true, envContext);
-    DropPartitionsRequest dropRequest = new DropPartitionsRequest("default", tableName,
-        RequestPartsSpec.names(Collections.singletonList("pk=z")));
-    client.drop_partitions_req(dropRequest);
+    client.dropPartition("default", tableName, "pk=x", true);
+    client.dropPartition("default", tableName, Collections.singletonList("y"), true);
   }
 
   // Not testing index calls, as no one uses indices
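
For reference, a minimal standalone sketch of the pattern this commit switches to: constructing a HiveMetaStoreClient against hive.metastore.uris and issuing a couple of the read calls exercised above. This class is not part of the commit, and the thrift://metastore-host:9083 URI is a placeholder rather than a value taken from the test configuration.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.thrift.TException;

public class MetastoreClientSketch {
  public static void main(String[] args) throws TException {
    HiveConf conf = new HiveConf();
    // Point at a remote metastore instead of instantiating an embedded HMSHandler,
    // which is the switch this commit makes in TestThrift's connect() method.
    conf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://metastore-host:9083");
    IMetaStoreClient client = new HiveMetaStoreClient(conf);
    try {
      // Read-only calls mirroring those used in TestThrift.
      System.out.println(client.getAllDatabases());
      System.out.println(client.getTables("default", "odpi_*"));
    } finally {
      client.close();
    }
  }
}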


[12/50] [abbrv] bigtop git commit: Progress so far. Doesn't work yet, but committing to avoid another data loss.

Posted by rv...@apache.org.
Progress so far.  Doesn't work yet, but committing to avoid another data loss.

(cherry picked from commit 18ee8453c11e7fd7c25af75e6c403753db11d5f5)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/c3137954
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/c3137954
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/c3137954

Branch: refs/heads/master
Commit: c313795409472dfffda49a4ffcb6dc6c59f9c5a9
Parents: 241c839
Author: Alan Gates <ga...@hortonworks.com>
Authored: Mon Nov 7 15:39:54 2016 -0800
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:12 2017 -0700

----------------------------------------------------------------------
 bigtop-tests/spec-tests/runtime/build.gradle    |   8 +-
 .../org/odpi/specs/runtime/hive/HCatalogMR.java | 124 ++++++++++
 .../odpi/specs/runtime/hive/JdbcConnector.java  |   3 +
 .../odpi/specs/runtime/hive/TestHCatalog.java   | 224 +++++++++++++++++++
 .../org/odpi/specs/runtime/hive/TestThrift.java |   2 +-
 5 files changed, 359 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/c3137954/bigtop-tests/spec-tests/runtime/build.gradle
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/build.gradle b/bigtop-tests/spec-tests/runtime/build.gradle
index 5505550..f0166c9 100644
--- a/bigtop-tests/spec-tests/runtime/build.gradle
+++ b/bigtop-tests/spec-tests/runtime/build.gradle
@@ -17,6 +17,8 @@
  */
 def junitVersion = '4.11'
 
+apply plugin: 'java'
+
 repositories {
   maven {
     url "http://conjars.org/repo/"
@@ -31,8 +33,12 @@ dependencies {
   compile group: 'org.apache.hive', name: 'hive-common', version: '1.2.1'
   compile group: 'org.apache.thrift', name: 'libfb303', version: '0.9.3'
   compile group: 'org.apache.thrift', name: 'libthrift', version: '0.9.3'
-  testCompile group: 'org.apache.hadoop', name: 'hadoop-common', version: '2.7.2'
+  compile group: 'org.apache.hadoop', name: 'hadoop-common', version: '2.7.2'
+  compile group: 'org.apache.hive.hcatalog', name: 'hive-hcatalog-core', version: '1.2.1'
   testCompile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-core', version: '2.7.2'
+  compile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-jobclient', version: '2.7.2'
+  testCompile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-common', version: '2.7.2'
+  testCompile group: 'org.apache.hadoop', name: 'hadoop-hdfs', version: '2.7.2'
   testCompile group: 'org.apache.hive', name: 'hive-exec', version: '1.2.1'
   if (System.env.HADOOP_CONF_DIR) testRuntime files(System.env.HADOOP_CONF_DIR)
 }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/c3137954/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
new file mode 100644
index 0000000..4a733d6
--- /dev/null
+++ b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
@@ -0,0 +1,124 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hive;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.util.Tool;
+import org.apache.hive.hcatalog.data.DefaultHCatRecord;
+import org.apache.hive.hcatalog.data.HCatRecord;
+import org.apache.hive.hcatalog.data.schema.HCatSchema;
+import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils;
+import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
+import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
+
+import java.io.IOException;
+import java.net.URI;
+import java.util.StringTokenizer;
+
+public class HCatalogMR extends Configured implements Tool {
+  private final static String INPUT_SCHEMA = "odpi.test.hcat.schema.input";
+  private final static String OUTPUT_SCHEMA = "odpi.test.hcat.schema.output";
+
+  @Override
+  public int run(String[] args) throws Exception {
+    Configuration conf = getConf();
+    args = new GenericOptionsParser(conf, args).getRemainingArgs();
+
+    String inputTable = args[0];
+    String outputTable = args[1];
+    String inputSchemaStr = args[2];
+    String outputSchemaStr = args[3];
+
+    conf.set(INPUT_SCHEMA, inputSchemaStr);
+    conf.set(OUTPUT_SCHEMA, outputSchemaStr);
+
+    Job job = new Job(conf, "odpi_hcat_test");
+    HCatInputFormat.setInput(job, "default", inputTable);
+
+    job.setInputFormatClass(HCatInputFormat.class);
+    job.setJarByClass(HCatalogMR.class);
+    job.setMapperClass(Map.class);
+    job.setReducerClass(Reduce.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+    job.setOutputKeyClass(WritableComparable.class);
+    job.setOutputValueClass(HCatRecord.class);
+    HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", outputTable, null));
+    HCatOutputFormat.setSchema(job, HCatSchemaUtils.getHCatSchema(outputSchemaStr));
+    job.setOutputFormatClass(HCatOutputFormat.class);
+
+    job.addCacheArchive(new URI("hdfs:/user/gates/hive-hcatalog-core-1.2.1.jar"));
+    job.addCacheArchive(new URI("hdfs:/user/gates/hive-metastore-1.2.1.jar"));
+    job.addCacheArchive(new URI("hdfs:/user/gates/hive-exec-1.2.1.jar"));
+
+    return job.waitForCompletion(true) ? 0 : 1;
+
+
+  }
+  public static class Map extends Mapper<WritableComparable,
+          HCatRecord, Text, IntWritable> {
+    private final static IntWritable one = new IntWritable(1);
+    private Text word = new Text();
+    private HCatSchema inputSchema = null;
+
+    @Override
+    protected void map(WritableComparable key, HCatRecord value, Context context)
+        throws IOException, InterruptedException {
+      if (inputSchema == null) {
+        inputSchema =
+            HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(INPUT_SCHEMA));
+      }
+      String line = value.getString("line", inputSchema);
+      StringTokenizer tokenizer = new StringTokenizer(line);
+      while (tokenizer.hasMoreTokens()) {
+        word.set(tokenizer.nextToken());
+        context.write(word, one);
+      }
+    }
+  }
+
+  public static class Reduce extends Reducer<Text, IntWritable, WritableComparable, HCatRecord> {
+    private HCatSchema outputSchema = null;
+
+    @Override
+    protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws
+        IOException, InterruptedException {
+      if (outputSchema == null) {
+        outputSchema =
+            HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(OUTPUT_SCHEMA));
+      }
+      int sum = 0;
+      for (IntWritable i : values) {
+        sum += i.get();
+      }
+      HCatRecord output = new DefaultHCatRecord(2);
+      output.set("word", outputSchema, key);
+      output.set("count", outputSchema, sum);
+      context.write(null, output);
+    }
+  }
+ }
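
HCatalogMR implements Tool, and its run() method expects four positional arguments: input table, output table, input schema string, and output schema string. Below is a minimal driver sketch under stated assumptions: the table names are hypothetical, the schema strings are built the same way TestHCatalog builds them, and the job as committed still hard-codes cache-archive jars under hdfs:/user/gates/, so those jars are assumed to be staged there.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;
import org.odpi.specs.runtime.hive.HCatalogMR;

import java.util.Arrays;
import java.util.Collections;

public class HCatalogMRDriverSketch {
  public static void main(String[] args) throws Exception {
    // Schemas for the word-count job, matching the columns HCatalogMR reads and writes.
    HCatSchema input = new HCatSchema(Collections.singletonList(
        new HCatFieldSchema("line", HCatFieldSchema.Type.STRING, "")));
    HCatSchema output = new HCatSchema(Arrays.asList(
        new HCatFieldSchema("word", HCatFieldSchema.Type.STRING, ""),
        new HCatFieldSchema("count", HCatFieldSchema.Type.INT, "")));

    // Positional arguments parsed in HCatalogMR.run(); the table names are placeholders.
    int rc = ToolRunner.run(new Configuration(), new HCatalogMR(), new String[]{
        "odpi_hcat_input_table",
        "odpi_hcat_output_table",
        input.getSchemaAsTypeString(),
        output.getSchemaAsTypeString()});
    System.exit(rc);
  }
}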

http://git-wip-us.apache.org/repos/asf/bigtop/blob/c3137954/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
index f5cc379..7512dab 100644
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
+++ b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/JdbcConnector.java
@@ -36,6 +36,9 @@ public class JdbcConnector {
   protected static final String LOCATION = "odpi.test.hive.location";
   protected static final String METASTORE_URL = "odpi.test.hive.metastore.url";
   protected static final String TEST_THRIFT = "odpi.test.hive.thrift.test";
+  protected static final String TEST_HCATALOG = "odpi.test.hive.hcatalog.test";
+  protected static final String HIVE_CONF_DIR = "odpi.test.hive.conf.dir";
+  protected static final String HADOOP_CONF_DIR = "odpi.test.hadoop.conf.dir";
 
   protected static Connection conn;
 

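The three new keys are consumed by the HCatalog test below through JdbcConnector.getProperty() and JdbcConnector.testActive(). A minimal sketch of how such a helper might behave, assuming the keys arrive as -D system properties on the test JVM (the actual getProperty() implementation is not shown in this diff):

public class PropertySketch {
  // Hypothetical stand-in for JdbcConnector.getProperty(): read a system property
  // and fail fast with the supplied description when it is missing.
  static String getProperty(String property, String description) {
    String value = System.getProperty(property);
    if (value == null) {
      throw new IllegalStateException("You must set " + property + ", the " + description);
    }
    return value;
  }

  public static void main(String[] args) {
    // e.g. java -Dodpi.test.hive.conf.dir=/etc/hive/conf PropertySketch
    System.out.println(getProperty("odpi.test.hive.conf.dir", "Hive conf directory"));
  }
}
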
http://git-wip-us.apache.org/repos/asf/bigtop/blob/c3137954/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
new file mode 100644
index 0000000..4b61131
--- /dev/null
+++ b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
@@ -0,0 +1,224 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hive;
+
+import org.apache.commons.exec.CommandLine;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hive.hcatalog.data.DefaultHCatRecord;
+import org.apache.hive.hcatalog.data.HCatRecord;
+import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
+import org.apache.hive.hcatalog.data.schema.HCatSchema;
+import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils;
+import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
+import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
+import org.apache.thrift.TException;
+import org.junit.Assert;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Random;
+import java.util.StringTokenizer;
+
+
+public class TestHCatalog {
+
+  private static final Log LOG = LogFactory.getLog(TestHCatalog.class.getName());
+
+  private static IMetaStoreClient client = null;
+  private static HiveConf conf;
+  private static HCatSchema inputSchema;
+  private static HCatSchema outputSchema;
+
+  private Random rand;
+
+  @BeforeClass
+  public static void connect() throws MetaException {
+    if (JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog ")) {
+      String hiveConfDir = JdbcConnector.getProperty(JdbcConnector.HIVE_CONF_DIR,
+          "Hive conf directory ");
+      String hadoopConfDir = JdbcConnector.getProperty(JdbcConnector.HADOOP_CONF_DIR,
+          "Hadoop conf directory ");
+      conf = new HiveConf();
+      String fileSep = System.getProperty("file.separator");
+      conf.addResource(new Path(hadoopConfDir + fileSep + "core-site.xml"));
+      conf.addResource(new Path(hadoopConfDir + fileSep + "hdfs-site.xml"));
+      conf.addResource(new Path(hadoopConfDir + fileSep + "yarn-site.xml"));
+      conf.addResource(new Path(hadoopConfDir + fileSep + "mapred-site.xml"));
+      conf.addResource(new Path(hiveConfDir + fileSep + "hive-site.xml"));
+      client = new HiveMetaStoreClient(conf);
+
+    }
+  }
+
+  @Before
+  public void checkIfActive() {
+    Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog "));
+    rand = new Random();
+  }
+
+  @Test
+  public void hcatInputFormatOutputFormat() throws TException, IOException, ClassNotFoundException,
+      InterruptedException, URISyntaxException {
+    // Create a table to write to
+    final String inputTable = "odpi_hcat_input_table_" + rand.nextInt(Integer.MAX_VALUE);
+    SerDeInfo serde = new SerDeInfo("default_serde",
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
+    FieldSchema schema = new FieldSchema("line", "string", "");
+    inputSchema = new HCatSchema(Collections.singletonList(new HCatFieldSchema(schema.getName(),
+        HCatFieldSchema.Type.STRING, schema.getComment())));
+    StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(schema), null,
+        "org.apache.hadoop.mapred.TextInputFormat",
+        "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null,
+        new HashMap<String, String>());
+    Table table = new Table(inputTable, "default", "me", 0, 0, 0, sd, null,
+        new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+    client.createTable(table);
+
+    final String outputTable = "odpi_hcat_output_table_" + rand.nextInt(Integer.MAX_VALUE);
+    sd = new StorageDescriptor(Arrays.asList(
+          new FieldSchema("word", "string", ""),
+          new FieldSchema("count", "int", "")),
+        null, "org.apache.hadoop.mapred.TextInputFormat",
+        "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null,
+        new HashMap<String, String>());
+    table = new Table(outputTable, "default", "me", 0, 0, 0, sd, null,
+        new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+    client.createTable(table);
+    outputSchema = new HCatSchema(Arrays.asList(
+        new HCatFieldSchema("word", HCatFieldSchema.Type.STRING, ""),
+        new HCatFieldSchema("count", HCatFieldSchema.Type.INT, "")));
+
+    // TODO Could I use HCatWriter here and the reader to read it?
+    // Write some stuff into a file in the location of the table
+    table = client.getTable("default", inputTable);
+    String inputFile = table.getSd().getLocation() + "/input";
+    /*
+    String inputFile = JdbcConnector.getProperty(JdbcConnector.LOCATION,
+        "Directory to write a file in ") + "/odpi_hcat_input_" + rand.nextInt(Integer.MAX_VALUE);
+        */
+    Path inputPath = new Path(inputFile);
+    FileSystem fs = FileSystem.get(conf);
+    FSDataOutputStream out = fs.create(inputPath);
+    out.writeChars("Mary had a little lamb\n");
+    out.writeChars("its fleece was white as snow\n");
+    out.writeChars("and everywhere that Mary went\n");
+    out.writeChars("the lamb was sure to go\n");
+    out.close();
+
+    Map<String, String> results = HiveHelper.execCommand(new CommandLine("hadoop")
+        .addArgument("jar")
+        .addArgument("/Users/gates/git/bigtop/runtime-1.2.0-SNAPSHOT.jar")
+        .addArgument(HCatalogMR.class.getName())
+        .addArgument(inputTable)
+        .addArgument(outputTable)
+        .addArgument(inputSchema.getSchemaAsTypeString())
+        .addArgument(outputSchema.getSchemaAsTypeString()));
+    Assert.assertEquals("HCat job failed", 0, Integer.parseInt(results.get("exitValue")));
+
+
+
+    /*
+    Job job = new Job(conf, "odpi_hcat_test");
+    HCatInputFormat.setInput(job, "default", inputTable);
+
+    job.setInputFormatClass(HCatInputFormat.class);
+    job.setJarByClass(TestHCatalog.class);
+    job.setMapperClass(Map.class);
+    job.setReducerClass(Reduce.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+    job.setOutputKeyClass(WritableComparable.class);
+    job.setOutputValueClass(HCatRecord.class);
+    HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", outputTable, null));
+    HCatOutputFormat.setSchema(job, outputSchema);
+    job.setOutputFormatClass(HCatOutputFormat.class);
+
+    job.addCacheArchive(new URI("hdfs:/user/gates/hive-hcatalog-core-1.2.1.jar"));
+    job.addCacheArchive(new URI("hdfs:/user/gates/hive-metastore-1.2.1.jar"));
+    job.addCacheArchive(new URI("hdfs:/user/gates/hive-exec-1.2.1.jar"));
+
+    Assert.assertTrue(job.waitForCompletion(true));
+    */
+
+    client.dropTable("default", inputTable);
+    client.dropTable("default", outputTable);
+  }
+
+  /*
+  public static class Map extends Mapper<WritableComparable,
+        HCatRecord, Text, IntWritable> {
+    private final static IntWritable one = new IntWritable(1);
+    private Text word = new Text();
+
+    @Override
+    protected void map(WritableComparable key, HCatRecord value, Context context)
+        throws IOException, InterruptedException {
+      String line = value.getString("line", inputSchema);
+      StringTokenizer tokenizer = new StringTokenizer(line);
+      while (tokenizer.hasMoreTokens()) {
+        word.set(tokenizer.nextToken());
+        context.write(word, one);
+      }
+    }
+  }
+
+  public static class Reduce extends Reducer<Text, IntWritable, WritableComparable, HCatRecord> {
+    @Override
+    protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws
+        IOException, InterruptedException {
+      int sum = 0;
+      for (IntWritable i : values) {
+        sum += i.get();
+      }
+      HCatRecord output = new DefaultHCatRecord(2);
+      output.set("word", outputSchema, key);
+      output.set("count", outputSchema, sum);
+      context.write(null, output);
+    }
+  }
+  */
+}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/c3137954/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
index 5eaab95..8e0abda 100644
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
+++ b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
@@ -45,7 +45,7 @@ import java.util.Random;
 
 public class TestThrift {
 
-  private static final Log LOG = LogFactory.getLog(JdbcConnector.class.getName());
+  private static final Log LOG = LogFactory.getLog(TestThrift.class.getName());
 
   private static IMetaStoreClient client = null;
   private static HiveConf conf;


[17/50] [abbrv] bigtop git commit: Added key value assignments to arguments. (cherry picked from commit 9cd5f71997eae0263b5a405d13dd6daff88b46eb)

Posted by rv...@apache.org.
Added key value assignments to arguments.
(cherry picked from commit 9cd5f71997eae0263b5a405d13dd6daff88b46eb)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/dcd9cb20
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/dcd9cb20
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/dcd9cb20

Branch: refs/heads/master
Commit: dcd9cb20ae680df0bd9a1ce62391fefb6b1c35fb
Parents: 47c31dc
Author: Raj Desai <rd...@us.ibm.com>
Authored: Thu Jan 19 15:08:45 2017 -0800
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:13 2017 -0700

----------------------------------------------------------------------
 .../org/odpi/specs/runtime/hive/HCatalogMR.java | 21 +++++++++++++++-----
 1 file changed, 16 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/dcd9cb20/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
index 7cb9bbe..4110d5d 100644
--- a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
+++ b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
@@ -46,14 +46,25 @@ public class HCatalogMR extends Configured implements Tool {
 
   @Override
   public int run(String[] args) throws Exception {
+    String inputTable = null;
+    String outputTable = null;
+    String inputSchemaStr = null;
+    String outputSchemaStr = null;
+    for(int i = 0; i < args.length; i++){
+        if(args[i].equalsIgnoreCase("-it")){
+            inputTable = args[i+1];
+        }else if(args[i].equalsIgnoreCase("-ot")){
+            outputTable = args[i+1];
+        }else if(args[i].equalsIgnoreCase("-is")){
+            inputSchemaStr = args[i+1];
+        }else if(args[i].equalsIgnoreCase("-os")){
+            outputSchemaStr = args[i+1];
+        }
+    }
+    
     Configuration conf = getConf();
     args = new GenericOptionsParser(conf, args).getRemainingArgs();
 
-    String inputTable = args[0];
-    String outputTable = args[1];
-    String inputSchemaStr = args[2];
-    String outputSchemaStr = args[3];
-
     conf.set(INPUT_SCHEMA, inputSchemaStr);
     conf.set(OUTPUT_SCHEMA, outputSchemaStr);
 

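With key/value flags, the caller no longer has to pass the four values in a fixed positional order. A minimal sketch of the invocation side, assuming the HiveHelper/CommandLine pattern and the inputTable/outputTable/inputSchema/outputSchema fields from the TestHCatalog test shown earlier in this series (the jar path below is only a placeholder, not part of this commit):

Map<String, String> results = HiveHelper.execCommand(new CommandLine("hadoop")
    .addArgument("jar")
    .addArgument("/path/to/spec-tests-runtime.jar")                          // placeholder jar location
    .addArgument(HCatalogMR.class.getName())
    .addArgument("-it").addArgument(inputTable)                              // input table
    .addArgument("-ot").addArgument(outputTable)                             // output table
    .addArgument("-is").addArgument(inputSchema.getSchemaAsTypeString())     // input schema
    .addArgument("-os").addArgument(outputSchema.getSchemaAsTypeString()));  // output schema
Assert.assertEquals("HCat job failed", 0, Integer.parseInt(results.get("exitValue")));

Because run() matches the flags case-insensitively and reads each value from the following argument, the pairs can appear in any order.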

[18/50] [abbrv] bigtop git commit: Added shell scripts to make it easier to run, and resource files with expected results for ODPi 2.1.

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/4f19c159/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json
new file mode 100644
index 0000000..f62ee8e
--- /dev/null
+++ b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json
@@ -0,0 +1 @@
+{"name":"hadoop-yarn-client","version":"2.7.3","classes":{"org.apache.hadoop.yarn.client.api.YarnClient":{"name":"org.apache.hadoop.yarn.client.api.YarnClient","methods":{"java.util.List getQueueAclsInfo() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueAclsInfo","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.YarnClient createYarnClient()":{"name":"createYarnClient","returnType":"org.apache.hadoop.yarn.client.api.YarnClient","args":[],"exceptions":[]},"java.util.List getApplications(java.util.Set) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":["java.util.Set"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Map getNodeToLabels() throws org.apache.hadoop.yarn.exceptions.YarnExceptio
 n, java.io.IOException":{"name":"getNodeToLabels","returnType":"java.util.Map","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse updateReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"updateReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getAllQueues() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getAllQueues","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplicationAttempts(org.apach
 e.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttempts","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.YarnClientApplication createApplication() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"createApplication","returnType":"org.apache.hadoop.yarn.client.api.YarnClientApplication","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Set getClusterNodeLabels() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterNodeLabels","returnType":"java.util.Set","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void moveApplicationAcrossQueues(org.apache
 .hadoop.yarn.api.records.ApplicationId, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"moveApplicationAcrossQueues","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport getApplicationAttemptReport(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttemptReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.Token getRMDelegationToken(org.apache.hadoop.io.Text) throws org.apache.hadoop.yarn.exceptions.YarnException, 
 java.io.IOException":{"name":"getRMDelegationToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":["org.apache.hadoop.io.Text"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse submitReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"submitReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getContainers(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainers","returnType":"java.util.List","args":["org.apac
 he.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationReport getApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationId submitApplication(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"submitApplication","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":["org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext"],"exceptions":["org.apache.hadoop.
 yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Map getLabelsToNodes() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getLabelsToNodes","returnType":"java.util.Map","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.security.token.Token getAMRMToken(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getAMRMToken","returnType":"org.apache.hadoop.security.token.Token","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications(java.util.EnumSet) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":["java.util.EnumSet"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnExcept
 ion","java.io.IOException"]},"java.util.List getRootQueueInfos() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getRootQueueInfos","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.QueueInfo getQueueInfo(java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueInfo","returnType":"org.apache.hadoop.yarn.api.records.QueueInfo","args":["java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getChildQueueInfos(java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getChildQueueInfos","returnType":"java.util.List","args":["java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Map getLabelsToNodes(java.util
 .Set) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getLabelsToNodes","returnType":"java.util.Map","args":["java.util.Set"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse deleteReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"deleteReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.YarnClusterMetrics getYarnClusterMetrics() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getYarnClusterMetrics","returnType":"org.apache.hadoop.yarn.api.rec
 ords.YarnClusterMetrics","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getNodeReports([Lorg.apache.hadoop.yarn.api.records.NodeState;) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getNodeReports","returnType":"java.util.List","args":["[Lorg.apache.hadoop.yarn.api.records.NodeState;"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void killApplication(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"killApplication","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args
 ":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications(java.util.Set, java.util.EnumSet) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":["java.util.Set","java.util.EnumSet"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ContainerReport getContainerReport(org.apache.hadoop.yarn.api.records.ContainerId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainerReport","returnType":"org.apache.hadoop.yarn.api.records.ContainerReport","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.client.api.async.NMClientAsync":{"name":"org.apache.hadoop.yarn.client.api.async.NMClientAsync","methods":{"v
 oid setClient(org.apache.hadoop.yarn.client.api.NMClient)":{"name":"setClient","returnType":"void","args":["org.apache.hadoop.yarn.client.api.NMClient"],"exceptions":[]},"void setCallbackHandler(org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler)":{"name":"setCallbackHandler","returnType":"void","args":["org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler"],"exceptions":[]},"void getContainerStatusAsync(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId)":{"name":"getContainerStatusAsync","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"void startContainerAsync(org.apache.hadoop.yarn.api.records.Container, org.apache.hadoop.yarn.api.records.ContainerLaunchContext)":{"name":"startContainerAsync","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Container","org.apache.hadoop.yarn.api.records.ContainerLaunch
 Context"],"exceptions":[]},"void stopContainerAsync(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId)":{"name":"stopContainerAsync","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"org.apache.hadoop.yarn.client.api.NMClient getClient()":{"name":"getClient","returnType":"org.apache.hadoop.yarn.client.api.NMClient","args":[],"exceptions":[]},"org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler getCallbackHandler()":{"name":"getCallbackHandler","returnType":"org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler","args":[],"exceptions":[]},"org.apache.hadoop.yarn.client.api.async.NMClientAsync createNMClientAsync(org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler)":{"name":"createNMClientAsync","returnType":"org.apache.hadoop.yarn.client.api.async.NMClientAsync","args":["org.apache.hadoop.yarn.client.api.
 async.NMClientAsync$CallbackHandler"],"exceptions":[]}}},"org.apache.hadoop.yarn.client.api.AMRMClient":{"name":"org.apache.hadoop.yarn.client.api.AMRMClient","methods":{"org.apache.hadoop.yarn.client.api.NMTokenCache getNMTokenCache()":{"name":"getNMTokenCache","returnType":"org.apache.hadoop.yarn.client.api.NMTokenCache","args":[],"exceptions":[]},"void addContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"addContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"void updateBlacklist(java.util.List, java.util.List)":{"name":"updateBlacklist","returnType":"void","args":["java.util.List","java.util.List"],"exceptions":[]},"java.util.List getMatchingRequests(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource)":{"name":"getMatchingRequests","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.Prior
 ity","java.lang.String","org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void waitFor(com.google.common.base.Supplier) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.api.records.Resource getAvailableResources()":{"name":"getAvailableResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void waitFor(com.google.common.base.Supplier, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse registerApplicationMaster(java.lang.String, int, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"registerApplicationMaster","returnType":"org.ap
 ache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","args":["java.lang.String","int","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void unregisterApplicationMaster(org.apache.hadoop.yarn.api.records.FinalApplicationStatus, java.lang.String, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"unregisterApplicationMaster","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus","java.lang.String","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void waitFor(com.google.common.base.Supplier, int, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int","int"],"exceptions":["java.lang.InterruptedException"]},"int getClusterNodeCount()":{"name":"getClusterNodeCount","returnType":"int","args":[],
 "exceptions":[]},"void releaseAssignedContainer(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"releaseAssignedContainer","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"void setNMTokenCache(org.apache.hadoop.yarn.client.api.NMTokenCache)":{"name":"setNMTokenCache","returnType":"void","args":["org.apache.hadoop.yarn.client.api.NMTokenCache"],"exceptions":[]},"void removeContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"removeContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"org.apache.hadoop.yarn.client.api.AMRMClient createAMRMClient()":{"name":"createAMRMClient","returnType":"org.apache.hadoop.yarn.client.api.AMRMClient","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse allocate(float) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":
 "allocate","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["float"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.client.api.YarnClientApplication":{"name":"org.apache.hadoop.yarn.client.api.YarnClientApplication","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse getNewApplicationResponse()":{"name":"getNewApplicationResponse","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext getApplicationSubmissionContext()":{"name":"getApplicationSubmissionContext","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.client.api.AHSClient":{"name":"org.apache.hadoop.yarn.client.api.AHSClient","methods":{"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport g
 etApplicationAttemptReport(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttemptReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getContainers(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainers","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org
 .apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationReport getApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.AHSClient createAHSClient()":{"name":"createAHSClient","returnType":"org.apache.hadoop.yarn.client.api.AHSClient","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerReport getContainerReport(org.apache.hadoop.yarn.api.records.ContainerId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainerReport","returnType":"org.apache.hadoop.yarn.api.records.ContainerReport","args":["or
 g.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplicationAttempts(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttempts","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync":{"name":"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync","methods":{"void addContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"addContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"void updateBlacklist(java.util.List, java.util.List)":{"name":"updateBlacklist","returnType":"void","args":["java.uti
 l.List","java.util.List"],"exceptions":[]},"java.util.List getMatchingRequests(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource)":{"name":"getMatchingRequests","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void waitFor(com.google.common.base.Supplier) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync createAMRMClientAsync(int, org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler)":{"name":"createAMRMClientAsync","returnType":"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync","args":["int","org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler"],"exceptions":[]},"org.apache.hadoop.yarn.api.r
 ecords.Resource getAvailableResources()":{"name":"getAvailableResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void waitFor(com.google.common.base.Supplier, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync createAMRMClientAsync(org.apache.hadoop.yarn.client.api.AMRMClient, int, org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler)":{"name":"createAMRMClientAsync","returnType":"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync","args":["org.apache.hadoop.yarn.client.api.AMRMClient","int","org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse registerApplicationMaster(java.lang.String, int, java.lang.String) throws
  org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"registerApplicationMaster","returnType":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","args":["java.lang.String","int","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void unregisterApplicationMaster(org.apache.hadoop.yarn.api.records.FinalApplicationStatus, java.lang.String, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"unregisterApplicationMaster","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus","java.lang.String","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void waitFor(com.google.common.base.Supplier, int, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int","int"],"exceptio
 ns":["java.lang.InterruptedException"]},"int getClusterNodeCount()":{"name":"getClusterNodeCount","returnType":"int","args":[],"exceptions":[]},"void releaseAssignedContainer(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"releaseAssignedContainer","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"void removeContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"removeContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"void setHeartbeatInterval(int)":{"name":"setHeartbeatInterval","returnType":"void","args":["int"],"exceptions":[]}}},"org.apache.hadoop.yarn.client.api.NMClient":{"name":"org.apache.hadoop.yarn.client.api.NMClient","methods":{"void stopContainer(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOExc
 eption":{"name":"stopContainer","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ContainerStatus getContainerStatus(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainerStatus","returnType":"org.apache.hadoop.yarn.api.records.ContainerStatus","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.NMTokenCache getNMTokenCache()":{"name":"getNMTokenCache","returnType":"org.apache.hadoop.yarn.client.api.NMTokenCache","args":[],"exceptions":[]},"org.apache.hadoop.yarn.client.api.NMClient creat
 eNMClient()":{"name":"createNMClient","returnType":"org.apache.hadoop.yarn.client.api.NMClient","args":[],"exceptions":[]},"java.util.Map startContainer(org.apache.hadoop.yarn.api.records.Container, org.apache.hadoop.yarn.api.records.ContainerLaunchContext) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"startContainer","returnType":"java.util.Map","args":["org.apache.hadoop.yarn.api.records.Container","org.apache.hadoop.yarn.api.records.ContainerLaunchContext"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.NMClient createNMClient(java.lang.String)":{"name":"createNMClient","returnType":"org.apache.hadoop.yarn.client.api.NMClient","args":["java.lang.String"],"exceptions":[]},"void setNMTokenCache(org.apache.hadoop.yarn.client.api.NMTokenCache)":{"name":"setNMTokenCache","returnType":"void","args":["org.apache.hadoop.yarn.client.api.NMTokenCache"],"exceptions":[]},"void 
 cleanupRunningContainersOnStop(boolean)":{"name":"cleanupRunningContainersOnStop","returnType":"void","args":["boolean"],"exceptions":[]}}}}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/bigtop/blob/4f19c159/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json
new file mode 100644
index 0000000..b394bff
--- /dev/null
+++ b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json
@@ -0,0 +1 @@
+{"name":"hadoop-yarn-common","version":"2.7.3","classes":{"org.apache.hadoop.yarn.security.ContainerTokenSelector":{"name":"org.apache.hadoop.yarn.security.ContainerTokenSelector","methods":{"org.apache.hadoop.security.token.Token selectToken(org.apache.hadoop.io.Text, java.util.Collection)":{"name":"selectToken","returnType":"org.apache.hadoop.security.token.Token","args":["org.apache.hadoop.io.Text","java.util.Collection"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo":{"name":"org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getTo
 kenInfo","returnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.SchedulerSecurityInfo":{"name":"org.apache.hadoop.yarn.security.SchedulerSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getTokenInfo","returnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.util.SystemClock":{"name":"org.apache.hadoop.yarn.util.SystemClock","methods":{"long getTime()":{"name":"getTime","returnType":"long",
 "args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.security.client.RMDelegationTokenSelector":{"name":"org.apache.hadoop.yarn.security.client.RMDelegationTokenSelector","methods":{"org.apache.hadoop.security.token.Token selectToken(org.apache.hadoop.io.Text, java.util.Collection)":{"name":"selectToken","returnType":"org.apache.hadoop.security.token.Token","args":["org.apache.hadoop.io.Text","java.util.Collection"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.client.ClientRMSecurityInfo":{"name":"org.apache.hadoop.yarn.security.client.ClientRMSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getTokenInfo","r
 eturnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.admin.AdminSecurityInfo":{"name":"org.apache.hadoop.yarn.security.admin.AdminSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getTokenInfo","returnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.client.ClientRMProxy":{"name":"org.apache.hadoop.yarn.client.ClientRMProxy","methods":{"org.apache.hadoop.io.Text getRMDelegationTokenService
 (org.apache.hadoop.conf.Configuration)":{"name":"getRMDelegationTokenService","returnType":"org.apache.hadoop.io.Text","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Text getAMRMTokenService(org.apache.hadoop.conf.Configuration)":{"name":"getAMRMTokenService","returnType":"org.apache.hadoop.io.Text","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Text getTokenService(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.String, int)":{"name":"getTokenService","returnType":"org.apache.hadoop.io.Text","args":["org.apache.hadoop.conf.Configuration","java.lang.String","java.lang.String","int"],"exceptions":[]},"java.lang.Object createRMProxy(org.apache.hadoop.conf.Configuration, java.lang.Class) throws java.io.IOException":{"name":"createRMProxy","returnType":"java.lang.Object","args":["org.apache.hadoop.conf.Configuration","java.lang.Class"],"exceptions":["java.io.IOException"]}}},"org.apache.had
 oop.yarn.util.Clock":{"name":"org.apache.hadoop.yarn.util.Clock","methods":{"long getTime()":{"name":"getTime","returnType":"long","args":[],"exceptions":[]}}}}}
\ No newline at end of file

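Each API report above is a single JSON document: a top-level name and version plus a "classes" map keyed by class name, where every method entry records its return type, argument types, and declared exceptions. As a hedged sketch only (Jackson is assumed to be on the classpath, and the class name ApiReportPeek is made up for illustration; neither is part of this commit), a reader that lists each class with its method count might look like:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.File;
import java.util.Iterator;

public class ApiReportPeek {  // hypothetical helper, for illustration only
  public static void main(String[] args) throws Exception {
    // Parse the single-line API report and print its coordinates.
    JsonNode report = new ObjectMapper().readTree(new File(args[0]));
    System.out.println(report.path("name").asText() + " " + report.path("version").asText());
    // Walk the "classes" map and count the methods recorded for each class.
    Iterator<String> classNames = report.path("classes").fieldNames();
    while (classNames.hasNext()) {
      String cls = classNames.next();
      int methodCount = report.path("classes").path(cls).path("methods").size();
      System.out.println("  " + cls + " : " + methodCount + " methods");
    }
  }
}

Run against hadoop-yarn-client-2.7.3-api-report.json, this would list entries such as org.apache.hadoop.yarn.client.api.YarnClient and org.apache.hadoop.yarn.client.api.AMRMClient together with their method counts.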

[05/50] [abbrv] bigtop git commit: removed commented code and comments

Posted by rv...@apache.org.
removed commented code and comments

removed commented code and comments
(cherry picked from commit 8067e908aead8ea0eefad13d983211513567cd2c)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/ccbdab47
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/ccbdab47
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/ccbdab47

Branch: refs/heads/master
Commit: ccbdab472c298b2d98e32c9cb2feb3a59bb0b779
Parents: b0571dc
Author: roypradeep <ro...@us.ibm.com>
Authored: Tue Nov 1 14:50:12 2016 -0700
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:10 2017 -0700

----------------------------------------------------------------------
 .../odpi/specs/runtime/hive/TestBeeline.java    | 466 ++++++++++---------
 1 file changed, 257 insertions(+), 209 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/ccbdab47/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
index 6d34e47..37c71cf 100644
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
+++ b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
@@ -34,224 +34,272 @@ public class TestBeeline {
 
 	public static final Log LOG = LogFactory.getLog(TestBeeline.class.getName());
 
-	  private static final String URL = "odpiHiveTestJdbcUrl";
-	  private static final String USER = "odpiHiveTestJdbcUser";
-	  private static final String PASSWD = "odpiHiveTestJdbcPassword";
-	  
-	  private static Map<String, String> results;
-	  
-	  private static String beelineUrl; 
-	  private static String beelineUser;
-	  private static String beelinePasswd;
-	  
-	  @BeforeClass
-	  public static void checkHiveHome(){
-		  results = HiveHelper.execCommand(new CommandLine("echo").addArgument("$HIVE_HOME"));
-		  Assert.assertEquals("HIVE_HOME is not in the current path.", "", Integer.parseInt(results.get("outputStream")));
-		  TestBeeline.beelineUrl = System.getProperty(URL);
-		  TestBeeline.beelineUser = System.getProperty(USER);
-		  TestBeeline.beelinePasswd = System.getProperty(PASSWD);
-		  
-		  // Create Url with username and/or passowrd to handle all ways to connect to beeline
-		  
-		  if (beelineUser != null && beelineUser != "") { beelineUrl = beelineUrl+" -n "+beelineUser; }
-		  else if (beelineUser != null && beelineUser != "" && beelinePasswd != null && beelinePasswd != "") { beelineUrl = beelineUrl+" -n "+beelineUser+" -p "+"beelinePasswd"; }
-		  
-	  }
-	  
-	  @Test
-	  public static void checkBeeline() {
-	    
-	    LOG.info("URL is " + beelineUrl); 
-	    LOG.info("User is " + beelineUser);
-	    LOG.info("Passwd is " + beelinePasswd); 
-	    LOG.info("Passwd is null " + (beelinePasswd == null));
-	    
-	    results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl));
-	    String consoleMsg = results.get("outputStream").toLowerCase();
-	    //System.out.println(consoleMsg);
-	    try {
-			Assert.assertEquals("beeline is using beelineUrl", true, consoleMsg.contains("connecting to "+beelineUrl) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-			LOG.info("Beeline -u PASSED.");
-		} catch (AssertionError e) {
-			// TODO Auto-generated catch block
-			LOG.error("Beeline -u FAILED.");
-			LOG.error(results.get("outputStream"));
+	private static final String URL = "odpiHiveTestJdbcUrl";
+	private static final String USER = "odpiHiveTestJdbcUser";
+	private static final String PASSWD = "odpiHiveTestJdbcPassword";
+
+	private static Map<String, String> results;
+
+	private static String beelineUrl; 
+	private static String beelineUser;
+	private static String beelinePasswd;
+	private static String testUrl;
+
+	//flags to check whether username and password should be added as arguments in some tests
+	private static boolean bothUserPass = false;
+	private static boolean onlyUser = false;
+
+	@BeforeClass
+	public static void initialSetup(){
+
+		TestBeeline.beelineUrl = System.getProperty(URL);
+		TestBeeline.beelineUser = System.getProperty(USER);
+		TestBeeline.beelinePasswd =System.getProperty(PASSWD);
+		TestBeeline.testUrl = System.getProperty(URL);
+
+		// Create Url with username and/or password to handle all ways to connect to beeline
+		if (beelineUser != null && beelineUser != "" && beelinePasswd != null && beelinePasswd != "") 
+		{ 
+			testUrl = beelineUrl+" -n "+beelineUser+" -p "+beelinePasswd; 
+			bothUserPass=true;
 		}
-	    
- 	  }
-	  
-	  @Test
-	  public static void checkBeelineConnect(){
-		  try(PrintWriter out = new PrintWriter("connect.url")){ out.println("!connect " + beelineUrl+";"); out.println("!quit"); } 
-		  catch (FileNotFoundException e1) {
-			// TODO Auto-generated catch block
-			e1.printStackTrace();
+		else if (beelineUser != null && beelineUser != "") 
+		{ 
+			testUrl = beelineUrl+" -n "+beelineUser; 
+			onlyUser=true;
 		}
-		  results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -f connect.url",false));
-		  String consoleMsg = results.get("outputStream").toLowerCase();
-		   
-		    try {
-				Assert.assertEquals("beeline is able to connect to " +beelineUrl, true, consoleMsg.contains("connecting to "+beelineUrl) && !consoleMsg.contains("error") && !consoleMsg.contains("exception") );
-				LOG.info("Beeline !connect PASSED.");
-			} catch (AssertionError e) {
-				// TODO Auto-generated catch block
-				LOG.error("Beeline !connect FAILED.");
-				LOG.error(results.get("outputStream"));
-			}  
-	  }
-	  
-	  @Test
-	  public static void checkBeelineHelp(){
-		   results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("--help"));
-		  String consoleMsg = results.get("outputStream").toLowerCase();
-		    try {
-				Assert.assertEquals("beeline help works", true, consoleMsg.contains("usage: java org.apache.hive.cli.beeline.beeLine" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-				LOG.info("Beeline --help PASSED.");
-			} catch (AssertionError e) {
-				// TODO Auto-generated catch block
-				LOG.error("Beeline --help FAILED.");
-				LOG.error(results.get("outputStream"));
-			}  
-	  }
-
-	  @Test
-	  public static void checkBeelineQueryExecFromCmdLine(){
-		  results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("SHOW DATABASES"));
-		  
-		  if(!results.get("outputStream").contains("odpi_runtime_hive")){
-				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive"));
-				
-			}else{
-				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
-				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive"));
+		System.out.println("Setting url " + testUrl);
+
+		LOG.info("URL is " + beelineUrl); 
+		LOG.info("User is " + beelineUser);
+		LOG.info("Passwd is " + beelinePasswd); 
+		LOG.info("Passwd is null " + (beelinePasswd == null));
+	}
+
+	@Test
+	public void checkBeeline() {
+
+		System.out.println(beelineUrl);  
+
+		results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(testUrl));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline -u FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+
+
+	}
+
+	@Test
+	public void checkBeelineConnect(){
+		try(PrintWriter out = new PrintWriter("connect.url")){ out.println("!connect " + beelineUrl+" "+beelineUser+" "+beelinePasswd+";"); out.println("!quit;"); } 
+		catch (FileNotFoundException e1) {
 			
-			}
-		  String consoleMsg = results.get("outputStream").toLowerCase();
-		  try {
-				Assert.assertEquals("beeline execution works", true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-				LOG.info("Beeline -e PASSED.");
-			} catch (AssertionError e) {
-				// TODO Auto-generated catch block
-				LOG.error("Beeline -e FAILED.");
-				LOG.error(results.get("outputStream"));
-			}  
-		  	
-		  HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));		    
-	  }
-	  
-	  @Test
-	  public static void checkBeelineQueryExecFromFile() throws FileNotFoundException{
-		  
-			try(PrintWriter out = new PrintWriter("beeline-f1.sql")){ out.println("SHOW DATABASES;"); }
-			try(PrintWriter out = new PrintWriter("beeline-f2.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
-			try(PrintWriter out = new PrintWriter("beeline-f3.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
-		 	try(PrintWriter out = new PrintWriter("beeline-f4.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); }
-		  results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" -f beeline-f1.sql",false));
-
-		  if(!results.get("outputStream").contains("odpi_runtime_hive")){
-				results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" -f beeline-f2.sql",false));
-				
+			e1.printStackTrace();
+		}
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -f connect.url",false));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+
+
+		Assert.assertEquals("beeline !connect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("connecting to "+beelineUrl) && !consoleMsg.contains("error") && !consoleMsg.contains("exception") );  
+	}
+
+	@Test
+	public void checkBeelineHelp(){
+		results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("--help"));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline --help FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("display this message" ) && consoleMsg.contains("usage: java org.apache.hive.cli.beeline.beeline") && !consoleMsg.contains("exception"));
+
+	}
+
+	@Test
+	public void checkBeelineQueryExecFromCmdLine(){
+
+		if (bothUserPass) 
+		{ 
+			results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd).addArgument("-e").addArgument("SHOW DATABASES;"));
+
+			if(!results.get("outputStream").contains("odpi_runtime_hive")){
+
+				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
+				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd).addArgument("-e").addArgument("SHOW DATABASES;"));
 			}else{
-				results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" -f beeline-f3.sql",false));
+
+				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive;"));
+				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
+				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd).addArgument("-e").addArgument("SHOW DATABASES;"));
+
 			}
-		  results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" -f beeline-f1.sql",false));
-		  
-		  String consoleMsg = results.get("outputStream").toLowerCase();
-		  try {
-				Assert.assertEquals("beeline execution with file works", true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-				LOG.info("Beeline -f PASSED.");
-			} catch (AssertionError e) {
-				// TODO Auto-generated catch block
-				LOG.error("Beeline -f FAILED.");
-				LOG.error(results.get("outputStream"));
-			}  
-		  
-		  HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" -f beeline-f4.sql",false));		    
-	  }
-	  
-	  public static void checkBeelineInitFile() throws FileNotFoundException{
-		  
-			try(PrintWriter out = new PrintWriter("beeline-i1.sql")){ out.println("SHOW DATABASES;"); }
-			try(PrintWriter out = new PrintWriter("beeline-i2.sql")){ out.println("CREATE DATABASE odpi_runtime_beeline_init;"); }
-			try(PrintWriter out = new PrintWriter("beeline-i3.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); out.println("CREATE DATABASE odpi_runtime_beeline_init;"); }
-		 	try(PrintWriter out = new PrintWriter("beeline-i4.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); }
-		 	
-		  results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" -i beeline-i1.sql",false));
-	  
-		  if(!results.get("outputStream").contains("odpi_runtime_beeline_init")){
-				results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" -i beeline-i2.sql",false));
-				
+			String consoleMsg = results.get("outputStream").toLowerCase();
+			Assert.assertEquals("beeline -e FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+
+			HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+		}
+		else if (onlyUser) 
+		{ 
+			results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-e").addArgument("SHOW DATABASES;"));
+
+			if(!results.get("outputStream").contains("odpi_runtime_hive")){
+
+				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
+				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-e").addArgument("SHOW DATABASES;"));
 			}else{
-				results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" -i beeline-i3.sql",false));
+
+				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive;"));
+				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
+				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-e").addArgument("SHOW DATABASES;"));
+
 			}
-		  results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" -i beeline-i1.sql",false));
-		  String consoleMsg = results.get("outputStream").toLowerCase();
-		  try {
-				Assert.assertEquals("beeline execution with init file works", true, consoleMsg.contains("odpi_runtime_beeline_init") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-				LOG.info("Beeline -i PASSED.");
-			} catch (AssertionError e) {
-				// TODO Auto-generated catch block
-				LOG.error("Beeline -i FAILED.");
-				LOG.error(results.get("outputStream"));
-			}  
-
-		  HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" -i beeline-i4.sql",false));		    
-	  }
-	  
-	  public static void checkBeelineHiveVar() throws FileNotFoundException{
-		  
-			try(PrintWriter out = new PrintWriter("beeline-hv1.sql")){ out.println("SHOW DATABASES;"); }
-			try(PrintWriter out = new PrintWriter("beeline-hv2.sql")){ out.println("CREATE DATABASE ${db};"); }
-			try(PrintWriter out = new PrintWriter("beeline-hv3.sql")){ out.println("DROP DATABASE ${db};"); out.println("CREATE DATABASE ${db};"); }
-		 	try(PrintWriter out = new PrintWriter("beeline-hv4.sql")){ out.println("DROP DATABASE ${db};"); }
-		 	
-		  results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" --hivevar db=odpi_runtime_beeline_hivevar -i beeline-hv1.sql",false));
-		  String consoleMsg = results.get("outputStream");
-		  
-		  if(!results.get("outputStream").contains("odpi_runtime_beeline_hivevar")){
-				results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" --hivevar db=odpi_runtime_beeline_hivevar -i beeline-hv2.sql",false));
-				
+			String consoleMsg = results.get("outputStream").toLowerCase();
+			Assert.assertEquals("beeline -e FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+
+			HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+		}
+		else {
+			results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("SHOW DATABASES;"));
+
+			if(!results.get("outputStream").contains("odpi_runtime_hive")){
+				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
+
 			}else{
-				results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" --hivevar db=odpi_runtime_beeline_hivevar -i beeline-hv3.sql",false));
-			}
-		  results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" --hivevar db=odpi_runtime_beeline_hivevar -i beeline-hv1.sql",false));
-		  consoleMsg = results.get("outputStream").toLowerCase();
-
-		  try {
-				Assert.assertEquals("beeline execution with hivevar file works", true, consoleMsg.contains("odpi_runtime_beeline_hivevar") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-				LOG.info("Beeline --hivevar PASSED.");
-			} catch (AssertionError e) {
-				// TODO Auto-generated catch block
-				LOG.error("Beeline --hivevar FAILED.");
-				LOG.error(results.get("outputStream"));
-			}  
-		  	
-		  HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" --hivevar db=odpi_runtime_beeline_hivevar -i beeline-hv4.sql",false));		    
-	  }
-	  
-	  @Test
-	  public static void CheckBeelineFastConnect(){
-		   results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("--fastConnect=false"));
-		  String consoleMsg = results.get("outputStream").toLowerCase();
-		    
-		    try {
-				Assert.assertEquals("beeline fastConnect works", true, consoleMsg.contains("set fastconnect to true to skip") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
-				LOG.info("Beeline --fastConnect PASSED.");
-			} catch (AssertionError e) {
-				// TODO Auto-generated catch block
-				LOG.error("Beeline --fastConnect FAILED.");
-				LOG.error(results.get("outputStream"));
-			}  
-	  }
-	  
-	  @AfterClass
-	  public static void cleanup() throws FileNotFoundException {
-	    
-		  	results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf beeline*.sql", false));
+				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive;"));
+				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive;"));
+				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("SHOW DATABASES;"));
 			
-	  }
+
+			}
+			String consoleMsg = results.get("outputStream").toLowerCase();
+			Assert.assertEquals("beeline -e FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+
+			HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+		}
+	}
+
+	@Test
+	public void checkBeelineQueryExecFromFile() throws FileNotFoundException{
+
+		try(PrintWriter out = new PrintWriter("beeline-f1.sql")){ out.println("SHOW DATABASES;"); }
+		try(PrintWriter out = new PrintWriter("beeline-f2.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("beeline-f3.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
+		try(PrintWriter out = new PrintWriter("beeline-f4.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); }
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" -f beeline-f1.sql",false));
+
+		if(!results.get("outputStream").contains("odpi_runtime_hive")){
+			results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" -f beeline-f2.sql",false));
+
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" -f beeline-f3.sql",false));
+		}
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" -f beeline-f1.sql",false));
+
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline -f FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+
+		HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" -f beeline-f4.sql",false));		    
+	}
+
+	@Test
+	public void checkBeelineInitFile() throws FileNotFoundException{
+
+		try(PrintWriter out = new PrintWriter("beeline-i1.sql")){ out.println("SHOW DATABASES;"); }
+		try(PrintWriter out = new PrintWriter("beeline-i2.sql")){ out.println("CREATE DATABASE odpi_runtime_beeline_init;"); }
+		try(PrintWriter out = new PrintWriter("beeline-i3.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); out.println("CREATE DATABASE odpi_runtime_beeline_init;"); }
+		try(PrintWriter out = new PrintWriter("beeline-i4.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); }
+
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" -i beeline-i1.sql",false));
+
+		if(!results.get("outputStream").contains("odpi_runtime_beeline_init")){
+			results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" -i beeline-i2.sql",false));
+
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" -i beeline-i3.sql",false));
+		}
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" -i beeline-i1.sql",false));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline -i FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_beeline_init") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+
+		HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" -i beeline-i4.sql",false));		    
+	}
+
+	@Test
+	public void checkBeelineHiveVar() throws FileNotFoundException{
+
+		try(PrintWriter out = new PrintWriter("beeline-hv1.sql")){ out.println("SHOW DATABASES;"); }
+		try(PrintWriter out = new PrintWriter("beeline-hv2.sql")){ out.println("CREATE DATABASE ${db};"); }
+		try(PrintWriter out = new PrintWriter("beeline-hv3.sql")){ out.println("DROP DATABASE ${db};"); out.println("CREATE DATABASE ${db};"); }
+		try(PrintWriter out = new PrintWriter("beeline-hv4.sql")){ out.println("DROP DATABASE ${db};"); }
+
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" --hivevar db=odpi_runtime_beeline_hivevar -i beeline-hv1.sql",false));
+		String consoleMsg = results.get("outputStream");
+
+		if(!results.get("outputStream").contains("odpi_runtime_beeline_hivevar")){
+			results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" --hivevar db=odpi_runtime_beeline_hivevar -i beeline-hv2.sql",false));
+
+		}else{
+			results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" --hivevar db=odpi_runtime_beeline_hivevar -i beeline-hv3.sql",false));
+		}
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" --hivevar db=odpi_runtime_beeline_hivevar -i beeline-hv1.sql",false));
+		consoleMsg = results.get("outputStream").toLowerCase();
+
+		Assert.assertEquals("beeline --hivevar FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("odpi_runtime_beeline_hivevar") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+
+		HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+testUrl+" --hivevar db=odpi_runtime_beeline_hivevar -i beeline-hv4.sql",false));		    
+	}
+
+	@Test
+	public void checkBeelineFastConnect(){
+		results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(testUrl).addArgument("--fastConnect=false"));
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline --fastConnect FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("set fastconnect to true to skip")); 
+	}
+
+	@Test
+	public void checkBeelineVerbose(){
+
+		// Explicitly check for username/password again: a url containing -u and -p does not work as a single addArgument value (as using testUrl would require), so the flags are passed separately.
+
+		if (bothUserPass) 
+		{ 
+			results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd).addArgument("--verbose=true"));
+		}
+		else if (onlyUser) 
+		{ 
+			results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("--verbose=true"));
+		}
+		else {
+			results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("--verbose=true"));
+		}
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline --verbose FAILED using url "+testUrl+". \n" +results.get("outputStream"), true, consoleMsg.contains("issuing: !connect jdbc:hive2:") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+	}
+
+	@Test
+	public void checkBeelineShowHeader(){
+		
+		// Explicitly check for username/password again: a url containing -u and -p does not work as a single addArgument value (as using testUrl would require), so the flags are passed separately.
+
+		if (bothUserPass) 
+		{ 
+			results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("-p").addArgument(beelinePasswd).addArgument("--showHeader=false").addArgument("-e").addArgument("SHOW DATABASES;"));
+		}
+		else if (onlyUser) 
+		{ 
+			results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-n").addArgument(beelineUser).addArgument("--showHeader=false").addArgument("-e").addArgument("SHOW DATABASES;"));
+		}
+		else {
+			results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("--showHeader=false").addArgument("-e").addArgument("SHOW DATABASES;"));
+		}
+		String consoleMsg = results.get("outputStream").toLowerCase();
+		Assert.assertEquals("beeline --showHeader FAILED. \n" +results.get("outputStream"), true, consoleMsg.contains("default")&&!consoleMsg.contains("database_name") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+
+	}
+
+	@AfterClass
+	public static void cleanup() throws FileNotFoundException {
+
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf beeline*.sql", false));
+		results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf connect.url", false));
+
+	}
+
 
 
-	  
 }
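
The tests above lean on a HiveHelper.execCommand(...) utility that is not part of this patch. From its usage it runs the given commons-exec CommandLine and returns the captured console text under the "outputStream" key. Note that commons-exec treats each addArgument(...) value as one atomic argument and quotes values containing spaces, which is why the tests either add every beeline flag as its own argument or shell out through /bin/sh -c with addArgument(cmd, false). A minimal sketch of such a helper, assuming Apache commons-exec; the class name CommandRunnerSketch and the exact map keys are illustrative, not the project's actual implementation:

import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.PumpStreamHandler;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

public class CommandRunnerSketch {
  // Runs the command, merging stdout and stderr into one buffer so callers can
  // inspect results.get("outputStream") the way the assertions above do.
  public static Map<String, String> execCommand(CommandLine cmd) {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    DefaultExecutor executor = new DefaultExecutor();
    executor.setStreamHandler(new PumpStreamHandler(out)); // capture stdout + stderr
    executor.setExitValues(null);                          // treat any exit code as success
    Map<String, String> result = new HashMap<>();
    try {
      int exitCode = executor.execute(cmd);                // blocks until the process exits
      result.put("exitValue", String.valueOf(exitCode));
    } catch (IOException e) {                              // launch failures etc.
      result.put("exception", e.toString());
    }
    result.put("outputStream", out.toString());
    return result;
  }
}

Called as execCommand(new CommandLine("beeline").addArgument("--help")), the returned map carries the console output that the tests scan for "error" and "exception".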


[06/50] [abbrv] bigtop git commit: initial commit for odpi beeline tests

Posted by rv...@apache.org.
initial commit for odpi beeline tests
(cherry picked from commit 11b24d6f7e9fd27d5abe958b1f319a8afda4f083)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/bbec845f
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/bbec845f
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/bbec845f

Branch: refs/heads/master
Commit: bbec845f319d38b6ea21cb5e1810e8ecd137a6bb
Parents: a9be675
Author: roypr <ro...@us.ibm.com>
Authored: Mon Oct 31 14:33:48 2016 -0700
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:10 2017 -0700

----------------------------------------------------------------------
 .../odpi/specs/runtime/hive/TestBeeline.java    | 257 +++++++++++++++++++
 1 file changed, 257 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/bbec845f/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
new file mode 100644
index 0000000..dcd105b
--- /dev/null
+++ b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
@@ -0,0 +1,257 @@
+package org.odpi.specs.runtime.hive;
+
+import org.apache.commons.exec.CommandLine;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import java.io.FileNotFoundException;
+import java.io.PrintWriter;
+import java.util.Map;
+
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+public class TestBeeline {
+
+	public static final Log LOG = LogFactory.getLog(TestBeeline.class.getName());
+
+	  private static final String URL = "odpiHiveTestBeelineUrl";
+	  private static final String USER = "odpiHiveTestBeelineUser";
+	  private static final String PASSWD = "odpiHiveTestBeelinePassword";
+	  
+	  private static Map<String, String> results;
+	  
+	  private static String beelineUrl; 
+	  private static String beelineUser;
+	  private static String beelinePasswd;
+	  
+	  @BeforeClass
+	  public static void checkHiveHome(){
+		  results = HiveHelper.execCommand(new CommandLine("echo").addArgument("$HIVE_HOME"));
+		  Assert.assertEquals("HIVE_HOME is not in the current path.", "", Integer.parseInt(results.get("outputStream")));
+		  TestBeeline.beelineUrl = System.getProperty(URL);
+		  TestBeeline.beelineUser = System.getProperty(USER);
+		  TestBeeline.beelinePasswd = System.getProperty(PASSWD);
+		  
+		  // Create URL with username and/or password to handle all ways to connect to beeline
+		  
+		  if (beelineUser != null && beelineUser != "") { beelineUrl = beelineUrl+" -n "+beelineUser; }
+		  else if (beelineUser != null && beelineUser != "" && beelinePasswd != null && beelinePasswd != "") { beelineUrl = beelineUrl+" -n "+beelineUser+" -p "+"beelinePasswd"; }
+		  
+	  }
+	  
+	  @Test
+	  public static void checkBeeline() {
+	    
+	    LOG.info("URL is " + beelineUrl); 
+	    LOG.info("User is " + beelineUser);
+	    LOG.info("Passwd is " + beelinePasswd); 
+	    LOG.info("Passwd is null " + (beelinePasswd == null));
+	    
+	    results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl));
+	    String consoleMsg = results.get("outputStream").toLowerCase();
+	    //System.out.println(consoleMsg);
+	    try {
+			Assert.assertEquals("beeline is using beelineUrl", true, consoleMsg.contains("connecting to "+beelineUrl) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+			LOG.info("Beeline -u PASSED.");
+		} catch (AssertionError e) {
+			// TODO Auto-generated catch block
+			LOG.error("Beeline -u FAILED.");
+			LOG.error(results.get("outputStream"));
+		}
+	    
+ 	  }
+	  
+	  @Test
+	  public static void checkBeelineConnect(){
+		  try(PrintWriter out = new PrintWriter("connect.url")){ out.println("!connect " + beelineUrl+";"); out.println("!quit"); } 
+		  catch (FileNotFoundException e1) {
+			// TODO Auto-generated catch block
+			e1.printStackTrace();
+		}
+		  results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -f connect.url",false));
+		  String consoleMsg = results.get("outputStream").toLowerCase();
+		   
+		    try {
+				Assert.assertEquals("beeline is able to connect to " +beelineUrl, true, consoleMsg.contains("connecting to "+beelineUrl) && !consoleMsg.contains("error") && !consoleMsg.contains("exception") );
+				LOG.info("Beeline !connect PASSED.");
+			} catch (AssertionError e) {
+				// TODO Auto-generated catch block
+				LOG.error("Beeline !connect FAILED.");
+				LOG.error(results.get("outputStream"));
+			}  
+	  }
+	  
+	  @Test
+	  public static void checkBeelineHelp(){
+		   results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("--help"));
+		  String consoleMsg = results.get("outputStream").toLowerCase();
+		    try {
+				Assert.assertEquals("beeline help works", true, consoleMsg.contains("usage: java org.apache.hive.cli.beeline.beeLine" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+				LOG.info("Beeline --help PASSED.");
+			} catch (AssertionError e) {
+				// TODO Auto-generated catch block
+				LOG.error("Beeline --help FAILED.");
+				LOG.error(results.get("outputStream"));
+			}  
+	  }
+
+	  @Test
+	  public static void checkBeelineQueryExecFromCmdLine(){
+		  results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("SHOW DATABASES"));
+		  
+		  if(!results.get("outputStream").contains("odpi_runtime_hive")){
+				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive"));
+				
+			}else{
+				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));
+				results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("CREATE DATABASE odpi_runtime_hive"));
+			
+			}
+		  String consoleMsg = results.get("outputStream").toLowerCase();
+		  try {
+				Assert.assertEquals("beeline execution works", true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+				LOG.info("Beeline -e PASSED.");
+			} catch (AssertionError e) {
+				// TODO Auto-generated catch block
+				LOG.error("Beeline -e FAILED.");
+				LOG.error(results.get("outputStream"));
+			}  
+		  	
+		  HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("-e").addArgument("DROP DATABASE odpi_runtime_hive"));		    
+	  }
+	  
+	  @Test
+	  public static void checkBeelineQueryExecFromFile() throws FileNotFoundException{
+		  
+			try(PrintWriter out = new PrintWriter("beeline-f1.sql")){ out.println("SHOW DATABASES;"); }
+			try(PrintWriter out = new PrintWriter("beeline-f2.sql")){ out.println("CREATE DATABASE odpi_runtime_hive;"); }
+			try(PrintWriter out = new PrintWriter("beeline-f3.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); out.println("CREATE DATABASE odpi_runtime_hive;"); }
+		 	try(PrintWriter out = new PrintWriter("beeline-f4.sql")){ out.println("DROP DATABASE odpi_runtime_hive;"); }
+		  results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" -f beeline-f1.sql",false));
+
+		  if(!results.get("outputStream").contains("odpi_runtime_hive")){
+				results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" -f beeline-f2.sql",false));
+				
+			}else{
+				results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" -f beeline-f3.sql",false));
+			}
+		  results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" -f beeline-f1.sql",false));
+		  
+		  String consoleMsg = results.get("outputStream").toLowerCase();
+		  try {
+				Assert.assertEquals("beeline execution with file works", true, consoleMsg.contains("odpi_runtime_hive" ) && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+				LOG.info("Beeline -f PASSED.");
+			} catch (AssertionError e) {
+				// TODO Auto-generated catch block
+				LOG.error("Beeline -f FAILED.");
+				LOG.error(results.get("outputStream"));
+			}  
+		  
+		  HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" -f beeline-f4.sql",false));		    
+	  }
+	  
+	  public static void checkBeelineInitFile() throws FileNotFoundException{
+		  
+			try(PrintWriter out = new PrintWriter("beeline-i1.sql")){ out.println("SHOW DATABASES;"); }
+			try(PrintWriter out = new PrintWriter("beeline-i2.sql")){ out.println("CREATE DATABASE odpi_runtime_beeline_init;"); }
+			try(PrintWriter out = new PrintWriter("beeline-i3.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); out.println("CREATE DATABASE odpi_runtime_beeline_init;"); }
+		 	try(PrintWriter out = new PrintWriter("beeline-i4.sql")){ out.println("DROP DATABASE odpi_runtime_beeline_init;"); }
+		 	
+		  results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" -i beeline-i1.sql",false));
+	  
+		  if(!results.get("outputStream").contains("odpi_runtime_beeline_init")){
+				results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" -i beeline-i2.sql",false));
+				
+			}else{
+				results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" -i beeline-i3.sql",false));
+			}
+		  results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" -i beeline-i1.sql",false));
+		  String consoleMsg = results.get("outputStream").toLowerCase();
+		  try {
+				Assert.assertEquals("beeline execution with init file works", true, consoleMsg.contains("odpi_runtime_beeline_init") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+				LOG.info("Beeline -i PASSED.");
+			} catch (AssertionError e) {
+				// TODO Auto-generated catch block
+				LOG.error("Beeline -i FAILED.");
+				LOG.error(results.get("outputStream"));
+			}  
+
+		  HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" -i beeline-i4.sql",false));		    
+	  }
+	  
+	  public static void checkBeelineHiveVar() throws FileNotFoundException{
+		  
+			try(PrintWriter out = new PrintWriter("beeline-hv1.sql")){ out.println("SHOW DATABASES;"); }
+			try(PrintWriter out = new PrintWriter("beeline-hv2.sql")){ out.println("CREATE DATABASE ${db};"); }
+			try(PrintWriter out = new PrintWriter("beeline-hv3.sql")){ out.println("DROP DATABASE ${db};"); out.println("CREATE DATABASE ${db};"); }
+		 	try(PrintWriter out = new PrintWriter("beeline-hv4.sql")){ out.println("DROP DATABASE ${db};"); }
+		 	
+		  results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" --hivevar db=odpi_runtime_beeline_hivevar -i beeline-hv1.sql",false));
+		  String consoleMsg = results.get("outputStream");
+		  
+		  if(!results.get("outputStream").contains("odpi_runtime_beeline_hivevar")){
+				results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" --hivevar db=odpi_runtime_beeline_hivevar -i beeline-hv2.sql",false));
+				
+			}else{
+				results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" --hivevar db=odpi_runtime_beeline_hivevar -i beeline-hv3.sql",false));
+			}
+		  results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" --hivevar db=odpi_runtime_beeline_hivevar -i beeline-hv1.sql",false));
+		  consoleMsg = results.get("outputStream").toLowerCase();
+
+		  try {
+				Assert.assertEquals("beeline execution with hivevar file works", true, consoleMsg.contains("odpi_runtime_beeline_hivevar") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+				LOG.info("Beeline --hivevar PASSED.");
+			} catch (AssertionError e) {
+				// TODO Auto-generated catch block
+				LOG.error("Beeline --hivevar FAILED.");
+				LOG.error(results.get("outputStream"));
+			}  
+		  	
+		  HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("beeline -u "+beelineUrl+" --hivevar db=odpi_runtime_beeline_hivevar -i beeline-hv4.sql",false));		    
+	  }
+	  
+	  @Test
+	  public static void CheckBeelineFastConnect(){
+		   results = HiveHelper.execCommand(new CommandLine("beeline").addArgument("-u").addArgument(beelineUrl).addArgument("--fastConnect=false"));
+		  String consoleMsg = results.get("outputStream").toLowerCase();
+		    
+		    try {
+				Assert.assertEquals("beeline fastConnect works", true, consoleMsg.contains("set fastconnect to true to skip") && !consoleMsg.contains("error") && !consoleMsg.contains("exception"));
+				LOG.info("Beeline --fastConnect PASSED.");
+			} catch (AssertionError e) {
+				// TODO Auto-generated catch block
+				LOG.error("Beeline --fastConnect FAILED.");
+				LOG.error(results.get("outputStream"));
+			}  
+	  }
+	  
+	  @AfterClass
+	  public static void cleanup() throws FileNotFoundException {
+	    
+		  	results = HiveHelper.execCommand(new CommandLine("/bin/sh").addArgument("-c").addArgument("rm -rf beeline*.sql", false));
+			
+	  }
+
+
+	  
+}
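
Two details of the setup above are easy to miss. The checks beelineUser != "" and beelinePasswd != "" compare object references rather than string contents in Java, so the else-if branch that should append the password can never be taken, and that branch appends the literal text "beelinePasswd" (a quoted string) rather than the variable's value. Purely as an illustration, the same property handling could be written with null/empty checks and separate arguments as in the sketch below; BeelineArgsSketch and beelineCommand are hypothetical names, and the property keys are the ones introduced in this commit (a later commit in this series renames them to odpiHiveTestJdbcUrl, odpiHiveTestJdbcUser and odpiHiveTestJdbcPassword):

import org.apache.commons.exec.CommandLine;

public class BeelineArgsSketch {
  // Builds a beeline invocation from the same system properties the test reads,
  // using isEmpty() so empty and missing values are handled the same way.
  static CommandLine beelineCommand(String query) {
    String url = System.getProperty("odpiHiveTestBeelineUrl");
    String user = System.getProperty("odpiHiveTestBeelineUser");
    String passwd = System.getProperty("odpiHiveTestBeelinePassword");

    CommandLine cmd = new CommandLine("beeline").addArgument("-u").addArgument(url);
    if (user != null && !user.isEmpty()) {
      cmd.addArgument("-n").addArgument(user);
      if (passwd != null && !passwd.isEmpty()) {
        cmd.addArgument("-p").addArgument(passwd);
      }
    }
    return cmd.addArgument("-e").addArgument(query);
  }
}

Each flag goes in as its own addArgument call because commons-exec passes every argument to the process atomically; embedding "-n user -p passwd" inside the URL string would reach beeline as a single malformed argument.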


[35/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-bin.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-bin.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-bin.list
deleted file mode 100644
index ab6cd51..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-bin.list
+++ /dev/null
@@ -1,2 +0,0 @@
-rcc
-hadoop

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-jar.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-jar.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-jar.list
deleted file mode 100644
index 2edbd0f..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-jar.list
+++ /dev/null
@@ -1,60 +0,0 @@
-api-util-1\.0\.0-M20[\.\-_].*jar
-curator-recipes-2\.7\.1[\.\-_].*jar
-curator-framework-2\.7\.1[\.\-_].*jar
-netty-3\.6\.2\.Final[\.\-_].*jar
-gson-2\.2\.4[\.\-_].*jar
-paranamer-2\.3[\.\-_].*jar
-jackson-core-asl-1\.9\.13[\.\-_].*jar
-jackson-xc-1\.9\.13[\.\-_].*jar
-jersey-server-1\.9[\.\-_].*jar
-stax-api-1\.0-2[\.\-_].*jar
-zookeeper-3\.4\.6[\.\-_].*jar
-htrace-core-3\.1\.0-incubating[\.\-_].*jar
-slf4j-api-1\.7\.10[\.\-_].*jar
-avro-1\.7\.[4-7][\.\-_].*jar
-slf4j-log4j12-1\.7\.10[\.\-_].*jar
-curator-client-2\.7\.1[\.\-_].*jar
-jets3t-0\.9\.0[\.\-_].*jar
-commons-net-3\.1[\.\-_].*jar
-jaxb-impl-2\.2\.3-1[\.\-_].*jar
-httpclient-4\.[0-9]\.[0-9][\.\-_].*jar
-apacheds-kerberos-codec-2\.0\.0-M15[\.\-_].*jar
-commons-cli-1\.2[\.\-_].*jar
-log4j-1\.2\.17[\.\-_].*jar
-jackson-mapper-asl-1\.9\.13[\.\-_].*jar
-java-xmlbuilder-0\.4[\.\-_].*jar
-jsp-api-2\.1[\.\-_].*jar
-guava-11\.0\.2[\.\-_].*jar
-jetty-6\.1\.26[\.\-_].*jar
-commons-logging-1\.1\.3[\.\-_].*jar
-snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar
-commons-httpclient-3\.1[\.\-_].*jar
-jsch-0\.1\.(4[2-9]|[5-9]\d)[\.\-_].*jar
-jersey-core-1\.9[\.\-_].*jar
-commons-compress-1\.4\.1[\.\-_].*jar
-jettison-1\.1[\.\-_].*jar
-junit-4\.11[\.\-_].*jar
-commons-collections-3\.2\.[12][\.\-_].*jar
-xz-1\.0[\.\-_].*jar
-asm-3\.2[\.\-_].*jar
-commons-codec-1\.4[\.\-_].*jar
-commons-digester-1\.8[\.\-_].*jar
-api-asn1-api-1\.0\.0-M20[\.\-_].*jar
-xmlenc-0\.52[\.\-_].*jar
-commons-configuration-1\.6[\.\-_].*jar
-mockito-all-1\.8\.5[\.\-_].*jar
-commons-lang-2\.6[\.\-_].*jar
-jetty-util-6\.1\.26[\.\-_].*jar
-jsr305-3\.0\.0[\.\-_].*jar
-protobuf-java-2\.5\.0[\.\-_].*jar
-httpcore-4\.[0-9]\.[0-9][\.\-_].*jar
-commons-io-2\.4[\.\-_].*jar
-activation-1\.1[\.\-_].*jar
-jersey-json-1\.9[\.\-_].*jar
-jaxb-api-2\.2\.2[\.\-_].*jar
-commons-math3-3\.1\.1[\.\-_].*jar
-hamcrest-core-1\.3[\.\-_].*jar
-commons-beanutils(-core)?-1\.[78]\.0[\.\-_].*jar
-apacheds-i18n-2\.0\.0-M15[\.\-_].*jar
-servlet-api-2\.5[\.\-_].*jar
-jackson-jaxrs-1\.9\.13[\.\-_].*jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common.list
deleted file mode 100644
index 73ff182..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common.list
+++ /dev/null
@@ -1,230 +0,0 @@
-bin
-bin/rcc
-bin/hadoop
-sbin
-sbin/hadoop-daemons\.sh
-sbin/hadoop-daemon\.sh
-sbin/slaves\.sh
-hadoop-annotations-2\.7\.[0-9][\.\-_].*jar
-hadoop-common-2\.7\.[0-9][\.\-_].*jar
-hadoop-annotations[\.\-_].*jar
-hadoop-common-2\.7\.[0-9].*-tests\.jar
-etc
-etc/hadoop
-hadoop-common[\.\-_].*jar
-hadoop-auth-2\.7\.[0-9][\.\-_].*jar
-libexec
-libexec/hdfs-config\.sh
-libexec/hadoop-layout\.sh
-libexec/yarn-config\.sh
-libexec/mapred-config\.sh
-libexec/hadoop-config\.sh
-libexec/init-hdfs\.sh
-hadoop-auth[\.\-_].*jar
-hadoop-nfs[\.\-_].*jar
-hadoop-nfs-2\.7\.[0-9][\.\-_].*jar
-client
-client/curator-recipes[\.\-_].*jar
-client/curator-recipes-2\.7\.1[\.\-_].*jar
-client/commons-configuration[\.\-_].*jar
-client/jsr305[\.\-_].*jar
-client/slf4j-log4j12[\.\-_].*jar
-client/hadoop-mapreduce-client-core[\.\-_].*jar
-client/hadoop-hdfs[\.\-_].*jar
-client/commons-configuration-1\.6[\.\-_].*jar
-client/commons-cli-1\.2[\.\-_].*jar
-client/hadoop-mapreduce-client-common-2\.7\.[0-9][\.\-_].*jar
-client/commons-digester-1\.8[\.\-_].*jar
-client/curator-client-2\.7\.1[\.\-_].*jar
-client/httpclient[\.\-_].*jar
-client/commons-beanutils(-core)?-1\.[78]\.0[\.\-_].*jar
-client/jsp-api-2\.1[\.\-_].*jar
-client/leveldbjni-all-1\.8[\.\-_].*jar
-client/slf4j-api-1\.7\.10[\.\-_].*jar
-client/hadoop-annotations-2\.7\.[0-9][\.\-_].*jar
-client/jersey-core[\.\-_].*jar
-client/commons-compress[\.\-_].*jar
-client/stax-api[\.\-_].*jar
-client/jaxb-api-2\.2\.2[\.\-_].*jar
-client/api-util-1\.0\.0-M20[\.\-_].*jar
-client/jackson-xc[\.\-_].*jar
-client/commons-cli[\.\-_].*jar
-client/xml-apis[\.\-_].*jar
-client/curator-client[\.\-_].*jar
-client/curator-framework-2\.7\.1[\.\-_].*jar
-client/commons-io-2\.4[\.\-_].*jar
-client/jackson-core-asl[\.\-_].*jar
-client/avro[\.\-_].*jar
-client/hadoop-mapreduce-client-app[\.\-_].*jar
-client/jetty-util[\.\-_].*jar
-client/guava[\.\-_].*jar
-client/commons-beanutils[\.\-_].*jar
-client/apacheds-i18n[\.\-_].*jar
-client/jetty-util-6\.1\.26[\.\-_].*jar
-client/xercesImpl-2\.9\.1[\.\-_].*jar
-client/commons-logging[\.\-_].*jar
-client/slf4j-api[\.\-_].*jar
-client/commons-digester[\.\-_].*jar
-client/avro-1\.7\.[4-7][\.\-_].*jar
-client/hadoop-common-2\.7\.[0-9][\.\-_].*jar
-client/commons-math3[\.\-_].*jar
-client/hadoop-yarn-common-2\.7\.[0-9][\.\-_].*jar
-client/hadoop-annotations[\.\-_].*jar
-client/xercesImpl[\.\-_].*jar
-client/commons-codec[\.\-_].*jar
-client/netty-3\.6\.2\.Final[\.\-_].*jar
-client/commons-collections[\.\-_].*jar
-client/httpcore-4\.[0-9]\.[0-9][\.\-_].*jar
-client/hadoop-mapreduce-client-jobclient[\.\-_].*jar
-client/htrace-core[\.\-_].*jar
-client/jersey-core-1\.9[\.\-_].*jar
-client/xz[\.\-_].*jar
-client/jackson-mapper-asl-1\.9\.13[\.\-_].*jar
-client/jsp-api[\.\-_].*jar
-client/commons-httpclient[\.\-_].*jar
-client/netty[\.\-_].*jar
-client/hadoop-mapreduce-client-shuffle-2\.7\.[0-9][\.\-_].*jar
-client/commons-net[\.\-_].*jar
-client/hadoop-yarn-server-common[\.\-_].*jar
-client/jaxb-api[\.\-_].*jar
-client/apacheds-kerberos-codec[\.\-_].*jar
-client/httpcore[\.\-_].*jar
-client/hadoop-yarn-server-common-2\.7\.[0-9][\.\-_].*jar
-client/hadoop-common[\.\-_].*jar
-client/leveldbjni-all[\.\-_].*jar
-client/snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar
-client/gson-2\.2\.4[\.\-_].*jar
-client/commons-net-3\.1[\.\-_].*jar
-client/api-util[\.\-_].*jar
-client/commons-compress-1\.4\.1[\.\-_].*jar
-client/jackson-xc-1\.9\.13[\.\-_].*jar
-client/netty-all-4\.0\.23\.Final[\.\-_].*jar
-client/xmlenc-0\.52[\.\-_].*jar
-client/jackson-jaxrs[\.\-_].*jar
-client/api-asn1-api[\.\-_].*jar
-client/api-asn1-api-1\.0\.0-M20[\.\-_].*jar
-client/commons-codec-1\.4[\.\-_].*jar
-client/jackson-core-asl-1\.9\.13[\.\-_].*jar
-client/servlet-api-2\.5[\.\-_].*jar
-client/commons-beanutils(-core)?[\.\-_].*jar
-client/paranamer-2\.3[\.\-_].*jar
-client/hadoop-yarn-api-2\.7\.[0-9][\.\-_].*jar
-client/hadoop-mapreduce-client-shuffle[\.\-_].*jar
-client/apacheds-i18n-2\.0\.0-M15[\.\-_].*jar
-client/hadoop-yarn-common[\.\-_].*jar
-client/hadoop-auth-2\.7\.[0-9][\.\-_].*jar
-client/snappy-java[\.\-_].*jar
-client/gson[\.\-_].*jar
-client/xml-apis-1\.3\.04[\.\-_].*jar
-client/commons-io[\.\-_].*jar
-client/commons-math3-3\.1\.1[\.\-_].*jar
-client/log4j[\.\-_].*jar
-client/hadoop-auth[\.\-_].*jar
-client/log4j-1\.2\.17[\.\-_].*jar
-client/servlet-api[\.\-_].*jar
-client/hadoop-hdfs-2\.7\.[0-9][\.\-_].*jar
-client/activation[\.\-_].*jar
-client/zookeeper[\.\-_].*jar
-client/xmlenc[\.\-_].*jar
-client/stax-api-1\.0-2[\.\-_].*jar
-client/hadoop-yarn-client-2\.7\.[0-9][\.\-_].*jar
-client/jersey-client-1\.9[\.\-_].*jar
-client/hadoop-mapreduce-client-common[\.\-_].*jar
-client/xz-1\.0[\.\-_].*jar
-client/zookeeper-3\.4\.6[\.\-_].*jar
-client/activation-1\.1[\.\-_].*jar
-client/hadoop-mapreduce-client-jobclient-2\.7\.[0-9][\.\-_].*jar
-client/htrace-core-3\.1\.0-incubating[\.\-_].*jar
-client/protobuf-java-2\.5\.0[\.\-_].*jar
-client/hadoop-mapreduce-client-app-2\.7\.[0-9][\.\-_].*jar
-client/apacheds-kerberos-codec-2\.0\.0-M15[\.\-_].*jar
-client/commons-lang[\.\-_].*jar
-client/httpclient-4\.[0-9]\.[0-9][\.\-_].*jar
-client/paranamer[\.\-_].*jar
-client/hadoop-yarn-api[\.\-_].*jar
-client/jersey-client[\.\-_].*jar
-client/hadoop-mapreduce-client-core-2\.7\.[0-9][\.\-_].*jar
-client/curator-framework[\.\-_].*jar
-client/guava-11\.0\.2[\.\-_].*jar
-client/jsr305-3\.0\.0[\.\-_].*jar
-client/hadoop-yarn-client[\.\-_].*jar
-client/jackson-jaxrs-1\.9\.13[\.\-_].*jar
-client/commons-httpclient-3\.1[\.\-_].*jar
-client/commons-collections-3\.2\.[12][\.\-_].*jar
-client/netty-all[\.\-_].*jar
-client/slf4j-log4j12-1\.7\.10[\.\-_].*jar
-client/protobuf-java[\.\-_].*jar
-client/jackson-mapper-asl[\.\-_].*jar
-client/commons-logging-1\.1\.3[\.\-_].*jar
-client/commons-lang-2\.6[\.\-_].*jar
-lib
-lib/curator-recipes-2\.7\.1[\.\-_].*jar
-lib/commons-configuration-1\.6[\.\-_].*jar
-lib/commons-cli-1\.2[\.\-_].*jar
-lib/commons-digester-1\.8[\.\-_].*jar
-lib/curator-client-2\.7\.1[\.\-_].*jar
-lib/commons-beanutils(-core)?-1\.[78]\.0[\.\-_].*jar
-lib/jsp-api-2\.1[\.\-_].*jar
-lib/jets3t-0\.9\.0[\.\-_].*jar
-lib/slf4j-api-1\.7\.10[\.\-_].*jar
-lib/jaxb-api-2\.2\.2[\.\-_].*jar
-lib/api-util-1\.0\.0-M20[\.\-_].*jar
-lib/jettison-1\.1[\.\-_].*jar
-lib/curator-framework-2\.7\.1[\.\-_].*jar
-lib/commons-io-2\.4[\.\-_].*jar
-lib/jetty-util-6\.1\.26[\.\-_].*jar
-lib/avro-1\.7\.[4-7][\.\-_].*jar
-lib/jaxb-impl-2\.2\.3-1[\.\-_].*jar
-lib/netty-3\.6\.2\.Final[\.\-_].*jar
-lib/httpcore-4\.[0-9]\.[0-9][\.\-_].*jar
-lib/jsch-0\.1\.(4[2-9]|[5-9]\d)[\.\-_].*jar
-lib/jersey-core-1\.9[\.\-_].*jar
-lib/jackson-mapper-asl-1\.9\.13[\.\-_].*jar
-lib/snappy-java-1\.0\.[45](\.[0-9])?[\.\-_].*jar
-lib/gson-2\.2\.4[\.\-_].*jar
-lib/commons-net-3\.1[\.\-_].*jar
-lib/asm-3\.2[\.\-_].*jar
-lib/commons-compress-1\.4\.1[\.\-_].*jar
-lib/mockito-all-1\.8\.5[\.\-_].*jar
-lib/jackson-xc-1\.9\.13[\.\-_].*jar
-lib/junit-4\.11[\.\-_].*jar
-lib/jersey-json-1\.9[\.\-_].*jar
-lib/xmlenc-0\.52[\.\-_].*jar
-lib/api-asn1-api-1\.0\.0-M20[\.\-_].*jar
-lib/commons-codec-1\.4[\.\-_].*jar
-lib/jackson-core-asl-1\.9\.13[\.\-_].*jar
-lib/servlet-api-2\.5[\.\-_].*jar
-lib/paranamer-2\.3[\.\-_].*jar
-lib/native
-lib/native/libhadoop\.a
-lib/native/libhadoop\.so
-lib/native/libhdfs\.a
-lib/native/libsnappy\.so[.0-9]*
-lib/native/libsnappy\.so
-lib/native/libhadoop\.so[.0-9]*
-lib/native/libhadooputils\.a
-lib/native/libsnappy\.so[.0-9]*
-lib/native/libhadooppipes\.a
-lib/jetty-6\.1\.26[\.\-_].*jar
-lib/jersey-server-1\.9[\.\-_].*jar
-lib/apacheds-i18n-2\.0\.0-M15[\.\-_].*jar
-lib/commons-math3-3\.1\.1[\.\-_].*jar
-lib/log4j-1\.2\.17[\.\-_].*jar
-lib/hamcrest-core-1\.3[\.\-_].*jar
-lib/stax-api-1\.0-2[\.\-_].*jar
-lib/xz-1\.0[\.\-_].*jar
-lib/zookeeper-3\.4\.6[\.\-_].*jar
-lib/activation-1\.1[\.\-_].*jar
-lib/htrace-core-3\.1\.0-incubating[\.\-_].*jar
-lib/protobuf-java-2\.5\.0[\.\-_].*jar
-lib/apacheds-kerberos-codec-2\.0\.0-M15[\.\-_].*jar
-lib/java-xmlbuilder-0\.4[\.\-_].*jar
-lib/httpclient-4\.[0-9]\.[0-9][\.\-_].*jar
-lib/guava-11\.0\.2[\.\-_].*jar
-lib/jsr305-3\.0\.0[\.\-_].*jar
-lib/jackson-jaxrs-1\.9\.13[\.\-_].*jar
-lib/commons-httpclient-3\.1[\.\-_].*jar
-lib/commons-collections-3\.2\.[12][\.\-_].*jar
-lib/slf4j-log4j12-1\.7\.10[\.\-_].*jar
-lib/commons-logging-1\.1\.3[\.\-_].*jar
-lib/commons-lang-2\.6[\.\-_].*jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-2.7.3-api-report.json
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-2.7.3-api-report.json b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-2.7.3-api-report.json
deleted file mode 100644
index b5e2265..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-2.7.3-api-report.json
+++ /dev/null
@@ -1 +0,0 @@
-{"name":"hadoop-hdfs","version":"2.7.3","classes":{"org.apache.hadoop.hdfs.server.namenode.NameNodeMXBean":{"name":"org.apache.hadoop.hdfs.server.namenode.NameNodeMXBean","methods":{"long getTotal()":{"name":"getTotal","returnType":"long","args":[],"exceptions":[]},"java.lang.String getDeadNodes()":{"name":"getDeadNodes","returnType":"java.lang.String","args":[],"exceptions":[]},"int getDistinctVersionCount()":{"name":"getDistinctVersionCount","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.hdfs.protocol.RollingUpgradeInfo$Bean getRollingUpgradeStatus()":{"name":"getRollingUpgradeStatus","returnType":"org.apache.hadoop.hdfs.protocol.RollingUpgradeInfo$Bean","args":[],"exceptions":[]},"java.lang.String getVersion()":{"name":"getVersion","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.Map getDistinctVersions()":{"name":"getDistinctVersions","returnType":"java.util.Map","args":[],"exceptions":[]},"int getThreads()":{"name":"getThreads","returnType
 ":"int","args":[],"exceptions":[]},"java.lang.String getJournalTransactionInfo()":{"name":"getJournalTransactionInfo","returnType":"java.lang.String","args":[],"exceptions":[]},"float getPercentBlockPoolUsed()":{"name":"getPercentBlockPoolUsed","returnType":"float","args":[],"exceptions":[]},"java.lang.String getClusterId()":{"name":"getClusterId","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getLiveNodes()":{"name":"getLiveNodes","returnType":"java.lang.String","args":[],"exceptions":[]},"long getBlockPoolUsedSpace()":{"name":"getBlockPoolUsedSpace","returnType":"long","args":[],"exceptions":[]},"java.lang.String getSafemode()":{"name":"getSafemode","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getCorruptFiles()":{"name":"getCorruptFiles","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getSoftwareVersion()":{"name":"getSoftwareVersion","returnType":"java.lang.String","args":[],"exceptions":[]
 },"long getTotalFiles()":{"name":"getTotalFiles","returnType":"long","args":[],"exceptions":[]},"long getCacheUsed()":{"name":"getCacheUsed","returnType":"long","args":[],"exceptions":[]},"java.lang.String getNameDirStatuses()":{"name":"getNameDirStatuses","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getCompileInfo()":{"name":"getCompileInfo","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getNodeUsage()":{"name":"getNodeUsage","returnType":"java.lang.String","args":[],"exceptions":[]},"long getNumberOfMissingBlocksWithReplicationFactorOne()":{"name":"getNumberOfMissingBlocksWithReplicationFactorOne","returnType":"long","args":[],"exceptions":[]},"java.lang.String getNameJournalStatus()":{"name":"getNameJournalStatus","returnType":"java.lang.String","args":[],"exceptions":[]},"long getNonDfsUsedSpace()":{"name":"getNonDfsUsedSpace","returnType":"long","args":[],"exceptions":[]},"java.lang.String getNNStarted()":{"name":"ge
 tNNStarted","returnType":"java.lang.String","args":[],"exceptions":[]},"float getPercentRemaining()":{"name":"getPercentRemaining","returnType":"float","args":[],"exceptions":[]},"boolean isUpgradeFinalized()":{"name":"isUpgradeFinalized","returnType":"boolean","args":[],"exceptions":[]},"long getTotalBlocks()":{"name":"getTotalBlocks","returnType":"long","args":[],"exceptions":[]},"java.lang.String getBlockPoolId()":{"name":"getBlockPoolId","returnType":"java.lang.String","args":[],"exceptions":[]},"long getUsed()":{"name":"getUsed","returnType":"long","args":[],"exceptions":[]},"long getNumberOfMissingBlocks()":{"name":"getNumberOfMissingBlocks","returnType":"long","args":[],"exceptions":[]},"java.lang.String getDecomNodes()":{"name":"getDecomNodes","returnType":"java.lang.String","args":[],"exceptions":[]},"long getFree()":{"name":"getFree","returnType":"long","args":[],"exceptions":[]},"float getPercentUsed()":{"name":"getPercentUsed","returnType":"float","args":[],"exceptions":
 []},"long getCacheCapacity()":{"name":"getCacheCapacity","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.hdfs.server.datanode.DataNodeMXBean":{"name":"org.apache.hadoop.hdfs.server.datanode.DataNodeMXBean","methods":{"java.util.Map getDatanodeNetworkCounts()":{"name":"getDatanodeNetworkCounts","returnType":"java.util.Map","args":[],"exceptions":[]},"java.lang.String getClusterId()":{"name":"getClusterId","returnType":"java.lang.String","args":[],"exceptions":[]},"int getXceiverCount()":{"name":"getXceiverCount","returnType":"int","args":[],"exceptions":[]},"java.lang.String getHttpPort()":{"name":"getHttpPort","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getVersion()":{"name":"getVersion","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getNamenodeAddresses()":{"name":"getNamenodeAddresses","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getVolumeInfo()":{"name":"getVolumeInf
 o","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getRpcPort()":{"name":"getRpcPort","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.hdfs.UnknownCipherSuiteException":{"name":"org.apache.hadoop.hdfs.UnknownCipherSuiteException","methods":{}}}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-bin.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-bin.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-bin.list
deleted file mode 100644
index 8887987..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-bin.list
+++ /dev/null
@@ -1 +0,0 @@
-hdfs

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-jar.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-jar.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-jar.list
deleted file mode 100644
index 8355c58..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs-jar.list
+++ /dev/null
@@ -1,25 +0,0 @@
-netty-3\.6\.2\.Final[\.\-_].*jar
-leveldbjni-all-1\.8[\.\-_].*jar
-jackson-core-asl-1\.9\.13[\.\-_].*jar
-jersey-server-1\.9[\.\-_].*jar
-htrace-core-3\.1\.0-incubating[\.\-_].*jar
-commons-daemon-1\.0\.13[\.\-_].*jar
-commons-cli-1\.2[\.\-_].*jar
-log4j-1\.2\.17[\.\-_].*jar
-jackson-mapper-asl-1\.9\.13[\.\-_].*jar
-guava-11\.0\.2[\.\-_].*jar
-jetty-6\.1\.26[\.\-_].*jar
-commons-logging-1\.1\.3[\.\-_].*jar
-jersey-core-1\.9[\.\-_].*jar
-asm-3\.2[\.\-_].*jar
-commons-codec-1\.4[\.\-_].*jar
-xml-apis-1\.3\.04[\.\-_].*jar
-xercesImpl-2\.9\.1[\.\-_].*jar
-xmlenc-0\.52[\.\-_].*jar
-commons-lang-2\.6[\.\-_].*jar
-netty-all-4\.0\.23\.Final[\.\-_].*jar
-jetty-util-6\.1\.26[\.\-_].*jar
-jsr305-3\.0\.0[\.\-_].*jar
-protobuf-java-2\.5\.0[\.\-_].*jar
-commons-io-2\.4[\.\-_].*jar
-servlet-api-2\.5[\.\-_].*jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs.list
deleted file mode 100644
index 12565fd..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-hdfs.list
+++ /dev/null
@@ -1,79 +0,0 @@
-webapps
-webapps/journal
-webapps/journal/index\.html
-webapps/journal/WEB-INF
-webapps/journal/WEB-INF/web\.xml
-webapps/secondary
-webapps/secondary/index\.html
-webapps/secondary/status\.html
-webapps/secondary/WEB-INF
-webapps/secondary/WEB-INF/web\.xml
-webapps/secondary/snn\.js
-webapps/hdfs
-webapps/hdfs/dfshealth\.html
-webapps/hdfs/index\.html
-webapps/hdfs/explorer\.js
-webapps/hdfs/dfshealth\.js
-webapps/hdfs/WEB-INF
-webapps/hdfs/WEB-INF/web\.xml
-webapps/hdfs/explorer\.html
-webapps/datanode
-webapps/datanode/index\.html
-webapps/datanode/robots\.txt
-webapps/datanode/WEB-INF
-webapps/datanode/WEB-INF/web\.xml
-webapps/nfs3
-webapps/nfs3/WEB-INF
-webapps/nfs3/WEB-INF/web\.xml
-webapps/static
-webapps/static/hadoop\.css
-webapps/static/bootstrap-3\.0\.2
-webapps/static/bootstrap-3\.0\.2/fonts
-webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.svg
-webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.eot
-webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.woff
-webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.ttf
-webapps/static/bootstrap-3\.0\.2/css
-webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.css
-webapps/static/bootstrap-3\.0\.2/js
-webapps/static/bootstrap-3\.0\.([2-9]|[3-9]\d+).*\.js
-webapps/static/jquery-1\.10\.([2-9]|[3-9]\d+).*\.js
-webapps/static/dust-helpers-1\.1\.([1-9]|[2-9]\d+).*\.js
-webapps/static/dust-full-2\.0\.\d+.*\.js
-webapps/static/dfs-dust\.js
-hadoop-hdfs\.jar
-bin
-bin/hdfs
-sbin
-sbin/distribute-exclude\.sh
-sbin/refresh-namenodes\.sh
-hadoop-hdfs-nfs-2\.7\.([1-9]|[2-9]\d+).*\.jar
-hadoop-hdfs-2\.7\.([1-9]|[2-9]\d+).*\.jar
-hadoop-hdfs-2\.7\.([1-9]|[2-9]\d+).*\.jar
-hadoop-hdfs-nfs\.jar
-lib
-lib/commons-daemon-1\.0\.(1[3-9]|[2-9]\d).*\.jar
-lib/commons-cli-1\.([2-9]|[3-9]\d+).*\.jar
-lib/leveldbjni-all-1\.([8-9]|[9-9]\d+).*\.jar
-lib/commons-io-2\.([4-9]|[5-9]\d+).*\.jar
-lib/jetty-util-6\.1\.(2[6-9]|[3-9]\d).*\.jar
-lib/xercesImpl-2\.9\.([1-9]|[2-9]\d+).*\.jar
-lib/netty-3\.6\.([2-9]|[3-9]\d+).*\.jar
-lib/jersey-core-1\.(9|[1-9]\d+).*\.jar
-lib/jackson-mapper-asl-1\.9\.(1[3-9]|[2-9]\d).*\.jar
-lib/asm-3\.([2-9]|[3-9]\d+).*\.jar
-lib/netty-all-4\.0\.(2[3-9]|[3-9]\d).*\.jar
-lib/xmlenc-0\.(5[2-9]|[6-9]\d).*\.jar
-lib/commons-codec-1\.([4-9]|[5-9]\d+).*\.jar
-lib/jackson-core-asl-1\.9\.(1[3-9]|[2-9]\d).*\.jar
-lib/servlet-api-2\.([5-9]|[6-9]\d+).*\.jar
-lib/jetty-6\.1\.(2[6-9]|[3-9]\d).*\.jar
-lib/jersey-server-1\.(9|[1-9]\d+).*\.jar
-lib/xml-apis-1\.3\.(0[4-9]|[1-9]\d).*\.jar
-lib/log4j-1\.2\.(1[7-9]|[2-9]\d).*\.jar
-lib/htrace-core-3\.1\.\d+.*\.jar
-lib/protobuf-java-2\.5\.\d+.*\.jar
-lib/guava-11\.0\.([2-9]|[3-9]\d+).*\.jar
-lib/jsr305-3\.0\.\d+.*\.jar
-lib/commons-logging-1\.1\.([3-9]|[4-9]\d+).*\.jar
-lib/commons-lang-2\.([6-9]|[7-9]\d+).*\.jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-bin.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-bin.list b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-bin.list
deleted file mode 100644
index 0a7a9c5..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-mapreduce-bin.list
+++ /dev/null
@@ -1 +0,0 @@
-mapred


[07/50] [abbrv] bigtop git commit: renamed URL USER and PASSWD

Posted by rv...@apache.org.
renamed URL USER and PASSWD
(cherry picked from commit 2933af9085f9315eb780ee9dbe7dee52fa1a89f3)


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/b0571dcb
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/b0571dcb
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/b0571dcb

Branch: refs/heads/master
Commit: b0571dcb8eacdd3a775a07845bfe73f20357d0f6
Parents: bbec845
Author: roypradeep <ro...@us.ibm.com>
Authored: Mon Oct 31 15:14:44 2016 -0700
Committer: Roman Shaposhnik <rv...@apache.org>
Committed: Thu Mar 23 10:27:10 2017 -0700

----------------------------------------------------------------------
 .../src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/b0571dcb/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
index dcd105b..6d34e47 100644
--- a/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
+++ b/bigtop-tests/spec-tests/runtime/src/test/java/org/odpi/specs/runtime/hive/TestBeeline.java
@@ -34,9 +34,9 @@ public class TestBeeline {
 
 	public static final Log LOG = LogFactory.getLog(TestBeeline.class.getName());
 
-	  private static final String URL = "odpiHiveTestBeelineUrl";
-	  private static final String USER = "odpiHiveTestBeelineUser";
-	  private static final String PASSWD = "odpiHiveTestBeelinePassword";
+	  private static final String URL = "odpiHiveTestJdbcUrl";
+	  private static final String USER = "odpiHiveTestJdbcUser";
+	  private static final String PASSWD = "odpiHiveTestJdbcPassword";
 	  
 	  private static Map<String, String> results;
 	  

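The change above points TestBeeline at the same odpiHiveTestJdbcUrl/odpiHiveTestJdbcUser/odpiHiveTestJdbcPassword system properties already used by the JDBC tests, so a single set of -D flags configures both. Below is a minimal, hypothetical sketch of how those properties might be read and turned into beeline arguments; the helper class, validation, and argument layout are illustrative assumptions, not the actual test code.

// Minimal sketch (not the actual TestBeeline implementation): reads the
// shared odpiHiveTestJdbc* system properties introduced by the rename above
// and builds a beeline argument list from them.
import java.util.ArrayList;
import java.util.List;

public class BeelineArgsSketch {
  private static final String URL = "odpiHiveTestJdbcUrl";
  private static final String USER = "odpiHiveTestJdbcUser";
  private static final String PASSWD = "odpiHiveTestJdbcPassword";

  public static List<String> build() {
    String url = System.getProperty(URL);
    if (url == null) {
      throw new IllegalStateException("System property " + URL + " must be set, "
          + "e.g. -DodpiHiveTestJdbcUrl=jdbc:hive2://localhost:10000");
    }
    List<String> args = new ArrayList<>();
    args.add("-u");               // beeline connection URL
    args.add(url);
    String user = System.getProperty(USER);
    if (user != null) { args.add("-n"); args.add(user); }      // beeline username
    String passwd = System.getProperty(PASSWD);
    if (passwd != null) { args.add("-p"); args.add(passwd); }  // beeline password
    return args;
  }
}
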

[40/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-api-2.7.3-api-report.json
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-api-2.7.3-api-report.json b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-api-2.7.3-api-report.json
new file mode 100644
index 0000000..6ad5f18
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-api-2.7.3-api-report.json
@@ -0,0 +1 @@
+{"name":"hadoop-yarn-api","version":"2.7.3","classes":{"org.apache.hadoop.yarn.api.records.ApplicationAccessType":{"name":"org.apache.hadoop.yarn.api.records.ApplicationAccessType","methods":{"[Lorg.apache.hadoop.yarn.api.records.ApplicationAccessType; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.ApplicationAccessType;","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationAccessType valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAccessType","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest","methods":{"void setTrackingUrl(java.lang.String)":{"name":"setTrackingUrl","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest newInstance
 (java.lang.String, int, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest","args":["java.lang.String","int","java.lang.String"],"exceptions":[]},"void setHost(java.lang.String)":{"name":"setHost","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int getRpcPort()":{"name":"getRpcPort","returnType":"int","args":[],"exceptions":[]},"void setRpcPort(int)":{"name":"setRpcPort","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String getHost()":{"name":"getHost","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getTrackingUrl()":{"name":"getTrackingUrl","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest","methods":{"org.apache.hadoop.yarn.api.records.ContainerLaunchContext getContainerLaunchContext()":{"na
 me":"getContainerLaunchContext","returnType":"org.apache.hadoop.yarn.api.records.ContainerLaunchContext","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest newInstance(org.apache.hadoop.yarn.api.records.ContainerLaunchContext, org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest","args":["org.apache.hadoop.yarn.api.records.ContainerLaunchContext","org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"void setContainerToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setContainerToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getContainerToken()":{"name":"getContainerToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"void setContainerLaunchContext(org.apache.hadoop.yarn.api.records.ContainerLaunchContext)":{"na
 me":"setContainerLaunchContext","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerLaunchContext"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest":{"name":"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest","methods":{"void setBlacklistAdditions(java.util.List)":{"name":"setBlacklistAdditions","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.List getBlacklistRemovals()":{"name":"getBlacklistRemovals","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.List getBlacklistAdditions()":{"name":"getBlacklistAdditions","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest newInstance(java.util.List, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest","args":["java.util.List","java.util.List"],"exceptions":[]},"void setBlacklistRemovals(java.util.List)":{"name":"
 setBlacklistRemovals","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest newInstance(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.YarnApplicationAttemp
 tState":{"name":"org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState","methods":{"org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState","args":["java.lang.String"],"exceptions":[]},"[Lorg.apache.hadoop.yarn.api.records.YarnApplicationAttemptState; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.YarnClusterMetrics":{"name":"org.apache.hadoop.yarn.api.records.YarnClusterMetrics","methods":{"org.apache.hadoop.yarn.api.records.YarnClusterMetrics newInstance(int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.YarnClusterMetrics","args":["int"],"exceptions":[]},"int getNumNodeManagers()":{"name":"getNumNodeManagers","returnType":"int","args":[],"exceptions":[]},"void setNumNodeManagers(int)":{"name":"
 setNumNodeManagers","returnType":"void","args":["int"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest","methods":{"java.util.List getIncreaseRequests()":{"name":"getIncreaseRequests","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest newInstance(int, float, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest","args":["int","float","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest","java.util.List"],"exceptions":[]},"void setResponseId(int)":{"name":"setResponseId","returnType":"void","args":["int"],"exceptions":[]},"void setAskList(java.util.List)":{"name":"setAskList","returnType":"void","args":["java.util.List"],"exception
 s":[]},"float getProgress()":{"name":"getProgress","returnType":"float","args":[],"exceptions":[]},"java.util.List getReleaseList()":{"name":"getReleaseList","returnType":"java.util.List","args":[],"exceptions":[]},"void setIncreaseRequests(java.util.List)":{"name":"setIncreaseRequests","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest getResourceBlacklistRequest()":{"name":"getResourceBlacklistRequest","returnType":"org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest newInstance(int, float, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest","args":["int","float","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest"],"exceptions":[]},"voi
 d setProgress(float)":{"name":"setProgress","returnType":"void","args":["float"],"exceptions":[]},"void setResourceBlacklistRequest(org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest)":{"name":"setResourceBlacklistRequest","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest"],"exceptions":[]},"java.util.List getAskList()":{"name":"getAskList","returnType":"java.util.List","args":[],"exceptions":[]},"int getResponseId()":{"name":"getResponseId","returnType":"int","args":[],"exceptions":[]},"void setReleaseList(java.util.List)":{"name":"setReleaseList","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse","methods":{"void setQueueInfo(org.apache.hadoop.yarn.api.records.QueueInfo)":{"name":"setQueueInfo","returnType":"void","args":["org.apache.hadoop.yarn.api.records.QueueInfo"],"exceptions":
 []},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse newInstance(org.apache.hadoop.yarn.api.records.QueueInfo)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse","args":["org.apache.hadoop.yarn.api.records.QueueInfo"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.QueueInfo getQueueInfo()":{"name":"getQueueInfo","returnType":"org.apache.hadoop.yarn.api.records.QueueInfo","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationReport":{"name":"org.apache.hadoop.yarn.api.records.ApplicationReport","methods":{"void setApplicationResourceUsageReport(org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport)":{"name":"setApplicationResourceUsageReport","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport"],"exceptions":[]},"long getFinishTime()":{"name":"getFinishTime","returnType":"long","args":[],"exceptions":[]},"void setFinalApplicationStatus(
 org.apache.hadoop.yarn.api.records.FinalApplicationStatus)":{"name":"setFinalApplicationStatus","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus"],"exceptions":[]},"void setUser(java.lang.String)":{"name":"setUser","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.FinalApplicationStatus getFinalApplicationStatus()":{"name":"getFinalApplicationStatus","returnType":"org.apache.hadoop.yarn.api.records.FinalApplicationStatus","args":[],"exceptions":[]},"void setName(java.lang.String)":{"name":"setName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport getApplicationResourceUsageReport()":{"name":"getApplicationResourceUsageReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport","args":[],"exceptions":[]},"java.util.Set getApplicationTags()":{"name":"getApplicationTags","returnType":"jav
 a.util.Set","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationReport newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, org.apache.hadoop.yarn.api.records.ApplicationAttemptId, java.lang.String, java.lang.String, java.lang.String, java.lang.String, int, org.apache.hadoop.yarn.api.records.Token, org.apache.hadoop.yarn.api.records.YarnApplicationState, java.lang.String, java.lang.String, long, long, org.apache.hadoop.yarn.api.records.FinalApplicationStatus, org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport, java.lang.String, float, java.lang.String, org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":["org.apache.hadoop.yarn.api.records.ApplicationId","org.apache.hadoop.yarn.api.records.ApplicationAttemptId","java.lang.String","java.lang.String","java.lang.String","java.lang.String","int","org.apache.hadoop.yarn.api.records.Token","org.apache.hadoo
 p.yarn.api.records.YarnApplicationState","java.lang.String","java.lang.String","long","long","org.apache.hadoop.yarn.api.records.FinalApplicationStatus","org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport","java.lang.String","float","java.lang.String","org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"void setApplicationType(java.lang.String)":{"name":"setApplicationType","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getClientToAMToken()":{"name":"getClientToAMToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"void setYarnApplicationState(org.apache.hadoop.yarn.api.records.YarnApplicationState)":{"name":"setYarnApplicationState","returnType":"void","args":["org.apache.hadoo
 p.yarn.api.records.YarnApplicationState"],"exceptions":[]},"float getProgress()":{"name":"getProgress","returnType":"float","args":[],"exceptions":[]},"void setQueue(java.lang.String)":{"name":"setQueue","returnType":"void","args":["java.lang.String"],"exceptions":[]},"long getStartTime()":{"name":"getStartTime","returnType":"long","args":[],"exceptions":[]},"void setStartTime(long)":{"name":"setStartTime","returnType":"void","args":["long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getAMRMToken()":{"name":"getAMRMToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"java.lang.String getHost()":{"name":"getHost","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getUser()":{"name":"getUser","returnType":"java.lang.String","args":[],"exceptions":[]},"void setDiagnostics(java.lang.String)":{"name":"setDiagnostics","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setOriginalTrackingUrl
 (java.lang.String)":{"name":"setOriginalTrackingUrl","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setApplicationTags(java.util.Set)":{"name":"setApplicationTags","returnType":"void","args":["java.util.Set"],"exceptions":[]},"java.lang.String getQueue()":{"name":"getQueue","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.YarnApplicationState getYarnApplicationState()":{"name":"getYarnApplicationState","returnType":"org.apache.hadoop.yarn.api.records.YarnApplicationState","args":[],"exceptions":[]},"void setTrackingUrl(java.lang.String)":{"name":"setTrackingUrl","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setHost(java.lang.String)":{"name":"setHost","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setClientToAMToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setClientToAMToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"excepti
 ons":[]},"void setAMRMToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setAMRMToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"int getRpcPort()":{"name":"getRpcPort","returnType":"int","args":[],"exceptions":[]},"void setRpcPort(int)":{"name":"setRpcPort","returnType":"void","args":["int"],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"java.lang.String getTrackingUrl()":{"name":"getTrackingUrl","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getDiagnostics()":{"name":"getDiagnostics","returnType":"java.lang.String","args":[],"exceptions":[]},"void setProgress(float)":{"name":"setProgress","returnType":"void","args":["float"],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"ex
 ceptions":[]},"java.lang.String getOriginalTrackingUrl()":{"name":"getOriginalTrackingUrl","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getApplicationType()":{"name":"getApplicationType","returnType":"java.lang.String","args":[],"exceptions":[]},"void setFinishTime(long)":{"name":"setFinishTime","returnType":"void","args":["long"],"exceptions":[]},"void setCurrentApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId)":{"name":"setCurrentApplicationAttemptId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptId getCurrentApplicationAttemptId()":{"name":"getCurrentApplicationAttemptId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptId","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.Resource":{"name":"org.apache.hadoop.yarn.api.records.Resource","methods":{"int hashCode()":{"name":"hashCo
 de","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource newInstance(int, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":["int","int"],"exceptions":[]},"void setVirtualCores(int)":{"name":"setVirtualCores","returnType":"void","args":["int"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void setMemory(int)":{"name":"setMemory","returnType":"void","args":["int"],"exceptions":[]},"int getMemory()":{"name":"getMemory","returnType":"int","args":[],"exceptions":[]},"int getVirtualCores()":{"name":"getVirtualCores","returnType":"int","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.NodeReport":{"name":"org.apache.hadoop.yarn.api.records.NodeReport","methods":{"void setCapability(org.apache.hado
 op.yarn.api.records.Resource)":{"name":"setCapability","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"int getNumContainers()":{"name":"getNumContainers","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getUsed()":{"name":"getUsed","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeId getNodeId()":{"name":"getNodeId","returnType":"org.apache.hadoop.yarn.api.records.NodeId","args":[],"exceptions":[]},"long getLastHealthReportTime()":{"name":"getLastHealthReportTime","returnType":"long","args":[],"exceptions":[]},"void setNodeId(org.apache.hadoop.yarn.api.records.NodeId)":{"name":"setNodeId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"void setNodeLabels(java.util.Set)":{"name":"setNodeLabels","returnType":"void","args":["java.util.Set"],"exceptions":[]},"org.apache.hadoop.yarn.ap
 i.records.Resource getCapability()":{"name":"getCapability","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setHealthReport(java.lang.String)":{"name":"setHealthReport","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setRackName(java.lang.String)":{"name":"setRackName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setLastHealthReportTime(long)":{"name":"setLastHealthReportTime","returnType":"void","args":["long"],"exceptions":[]},"void setHttpAddress(java.lang.String)":{"name":"setHttpAddress","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getRackName()":{"name":"getRackName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setUsed(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setUsed","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"java.lang.String getHealthReport()":{"name":"getHealthRe
 port","returnType":"java.lang.String","args":[],"exceptions":[]},"void setNodeState(org.apache.hadoop.yarn.api.records.NodeState)":{"name":"setNodeState","returnType":"void","args":["org.apache.hadoop.yarn.api.records.NodeState"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeReport newInstance(org.apache.hadoop.yarn.api.records.NodeId, org.apache.hadoop.yarn.api.records.NodeState, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, int, java.lang.String, long)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.NodeReport","args":["org.apache.hadoop.yarn.api.records.NodeId","org.apache.hadoop.yarn.api.records.NodeState","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource","int","java.lang.String","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeState getNodeState()":{"name":"getNodeState"
 ,"returnType":"org.apache.hadoop.yarn.api.records.NodeState","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeReport newInstance(org.apache.hadoop.yarn.api.records.NodeId, org.apache.hadoop.yarn.api.records.NodeState, java.lang.String, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, int, java.lang.String, long, java.util.Set)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.NodeReport","args":["org.apache.hadoop.yarn.api.records.NodeId","org.apache.hadoop.yarn.api.records.NodeState","java.lang.String","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource","int","java.lang.String","long","java.util.Set"],"exceptions":[]},"java.lang.String getHttpAddress()":{"name":"getHttpAddress","returnType":"java.lang.String","args":[],"exceptions":[]},"void setNumContainers(int)":{"name":"setNumContainers","returnType":"void","args":["int"],"
 exceptions":[]},"java.util.Set getNodeLabels()":{"name":"getNodeLabels","returnType":"java.util.Set","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.Priority":{"name":"org.apache.hadoop.yarn.api.records.Priority","methods":{"void setPriority(int)":{"name":"setPriority","returnType":"void","args":["int"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.yarn.api.records.Priority)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.Priority"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Priority newInstance(int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.Priority","args":["int"],"exceptions":[]},"int getPriority()":{"name":"getPriority","returnType":"int","args":[],"exceptions":[]},"int compareTo(java.lang.Object)
 ":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationAttemptId":{"name":"org.apache.hadoop.yarn.api.records.ApplicationAttemptId","methods":{"int compareTo(org.apache.hadoop.yarn.api.records.ApplicationAttemptId)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"int getAttemptId()":{"name":"getAttemptId","returnType":"int","args":[],"except
 ions":[]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptId newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptId","args":["org.apache.hadoop.yarn.api.records.ApplicationId","int"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.NMToken":{"name":"org.apache.hadoop.yarn.api.records.NMToken","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void setToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeId getNodeId()":{"name":"getNodeId","returnType":
 "org.apache.hadoop.yarn.api.records.NodeId","args":[],"exceptions":[]},"void setNodeId(org.apache.hadoop.yarn.api.records.NodeId)":{"name":"setNodeId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getToken()":{"name":"getToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NMToken newInstance(org.apache.hadoop.yarn.api.records.NodeId, org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.NMToken","args":["org.apache.hadoop.yarn.api.records.NodeId","org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicati
 onMasterRequest","methods":{"void setFinalApplicationStatus(org.apache.hadoop.yarn.api.records.FinalApplicationStatus)":{"name":"setFinalApplicationStatus","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.FinalApplicationStatus getFinalApplicationStatus()":{"name":"getFinalApplicationStatus","returnType":"org.apache.hadoop.yarn.api.records.FinalApplicationStatus","args":[],"exceptions":[]},"void setTrackingUrl(java.lang.String)":{"name":"setTrackingUrl","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest newInstance(org.apache.hadoop.yarn.api.records.FinalApplicationStatus, java.lang.String, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus","java.lang.String",
 "java.lang.String"],"exceptions":[]},"void setDiagnostics(java.lang.String)":{"name":"setDiagnostics","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getTrackingUrl()":{"name":"getTrackingUrl","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getDiagnostics()":{"name":"getDiagnostics","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetC
 lusterMetricsRequest newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest","methods":{"java.util.List getStartContainerRequests()":{"name":"getStartContainerRequests","returnType":"java.util.List","args":[],"exceptions":[]},"void setStartContainerRequests(java.util.List)":{"name":"setStartContainerRequests","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ContainerLaunchContext":{"name":"org.apache.hadoop.yarn.api.records.ContainerLaunchContext","methods":{"ja
 va.util.Map getApplicationACLs()":{"name":"getApplicationACLs","returnType":"java.util.Map","args":[],"exceptions":[]},"java.util.Map getServiceData()":{"name":"getServiceData","returnType":"java.util.Map","args":[],"exceptions":[]},"void setApplicationACLs(java.util.Map)":{"name":"setApplicationACLs","returnType":"void","args":["java.util.Map"],"exceptions":[]},"java.util.Map getLocalResources()":{"name":"getLocalResources","returnType":"java.util.Map","args":[],"exceptions":[]},"void setServiceData(java.util.Map)":{"name":"setServiceData","returnType":"void","args":["java.util.Map"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerLaunchContext newInstance(java.util.Map, java.util.Map, java.util.List, java.util.Map, java.nio.ByteBuffer, java.util.Map)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ContainerLaunchContext","args":["java.util.Map","java.util.Map","java.util.List","java.util.Map","java.nio.ByteBuffer","java.util.Map"],"exceptions"
 :[]},"java.util.Map getEnvironment()":{"name":"getEnvironment","returnType":"java.util.Map","args":[],"exceptions":[]},"java.util.List getCommands()":{"name":"getCommands","returnType":"java.util.List","args":[],"exceptions":[]},"java.nio.ByteBuffer getTokens()":{"name":"getTokens","returnType":"java.nio.ByteBuffer","args":[],"exceptions":[]},"void setLocalResources(java.util.Map)":{"name":"setLocalResources","returnType":"void","args":["java.util.Map"],"exceptions":[]},"void setCommands(java.util.List)":{"name":"setCommands","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setTokens(java.nio.ByteBuffer)":{"name":"setTokens","returnType":"void","args":["java.nio.ByteBuffer"],"exceptions":[]},"void setEnvironment(java.util.Map)":{"name":"setEnvironment","returnType":"void","args":["java.util.Map"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportReque
 st","methods":{"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest newInstance(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse","methods":{"void setFailedRequests(java.util.Map)":{"name":"setFailedRequests","returnType":"void","args":["java.util.Map"],"excep
 tions":[]},"void setSuccessfullyStoppedContainers(java.util.List)":{"name":"setSuccessfullyStoppedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.util.List getSuccessfullyStoppedContainers()":{"name":"getSuccessfullyStoppedContainers","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse newInstance(java.util.List, java.util.Map)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse","args":["java.util.List","java.util.Map"],"exceptions":[]},"java.util.Map getFailedRequests()":{"name":"getFailedRequests","returnType":"java.util.Map","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.QueueState":{"name":"org.apache.hadoop.yarn.api.records.QueueState","methods":{"org.apache.hadoop.yarn.api.records.QueueState valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.QueueState","args":["
 java.lang.String"],"exceptions":[]},"[Lorg.apache.hadoop.yarn.api.records.QueueState; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.QueueState;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationId":{"name":"org.apache.hadoop.yarn.api.records.ApplicationId","methods":{"org.apache.hadoop.yarn.api.records.ApplicationId newInstance(long, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":["long","int"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"long getClusterTimestamp()":{"name":"getClusterTimestamp","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"int getId()":{"name":"getId","returnType":"int","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"
 ],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"int compareTo(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse","methods":{"org.apache.hadoop.yarn.api.records.YarnClusterMetrics getClusterMetrics()":{"name":"getClusterMetrics","returnType":"org.apache.hadoop.yarn.api.records.YarnClusterMetrics","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse newInstance(org.apache.hadoop.yarn.api.records.YarnClusterMetrics)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse","args":["org.apache.hadoop.yarn.api.records.YarnClusterMetrics"],"exc
 eptions":[]},"void setClusterMetrics(org.apache.hadoop.yarn.api.records.YarnClusterMetrics)":{"name":"setClusterMetrics","returnType":"void","args":["org.apache.hadoop.yarn.api.records.YarnClusterMetrics"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse","args":["java.util.List"],"exceptions":[]},"java.util.List getUserAclsInfoList()":{"name":"getUserAclsInfoList","returnType":"java.util.List","args":[],"exceptions":[]},"void setUserAclsInfoList(java.util.List)":{"name":"setUserAclsInfoList","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.FinalApplicationStatus":{"name":"org.apache.hadoop.
 yarn.api.records.FinalApplicationStatus","methods":{"[Lorg.apache.hadoop.yarn.api.records.FinalApplicationStatus; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.FinalApplicationStatus;","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.FinalApplicationStatus valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.FinalApplicationStatus","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse","methods":{"java.util.List getApplicationList()":{"name":"getApplicationList","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse","args":["java.util.List"],"exceptions":[]},"voi
 d setApplicationList(java.util.List)":{"name":"setApplicationList","returnType":"void","args":["java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.YarnApplicationState":{"name":"org.apache.hadoop.yarn.api.records.YarnApplicationState","methods":{"[Lorg.apache.hadoop.yarn.api.records.YarnApplicationState; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.YarnApplicationState;","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.YarnApplicationState valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.YarnApplicationState","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.URL":{"name":"org.apache.hadoop.yarn.api.records.URL","methods":{"java.lang.String getFile()":{"name":"getFile","returnType":"java.lang.String","args":[],"exceptions":[]},"void setPort(int)":{"name":"setPort","returnType":"void","args":["int"],"exceptions":[]},"void setUserInfo(java.la
 ng.String)":{"name":"setUserInfo","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setHost(java.lang.String)":{"name":"setHost","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setScheme(java.lang.String)":{"name":"setScheme","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getHost()":{"name":"getHost","returnType":"java.lang.String","args":[],"exceptions":[]},"void setFile(java.lang.String)":{"name":"setFile","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getScheme()":{"name":"getScheme","returnType":"java.lang.String","args":[],"exceptions":[]},"java.lang.String getUserInfo()":{"name":"getUserInfo","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.URL newInstance(java.lang.String, java.lang.String, int, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.URL","args":["java.lang.String","
 java.lang.String","int","java.lang.String"],"exceptions":[]},"int getPort()":{"name":"getPort","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse","methods":{"void setIsUnregistered(boolean)":{"name":"setIsUnregistered","returnType":"void","args":["boolean"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse newInstance(boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse","args":["boolean"],"exceptions":[]},"boolean getIsUnregistered()":{"name":"getIsUnregistered","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.ApplicationMasterProtocol":{"name":"org.apache.hadoop.yarn.api.ApplicationMasterProtocol","methods":{"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse fi
 nishApplicationMaster(org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"finishApplicationMaster","returnType":"org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse registerApplicationMaster(org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"registerApplicationMaster","returnType":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.Yarn
 Exception","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse allocate(org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"allocate","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse","methods":{"void setApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationReport)":{"name":"setApplicationReport","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationReport"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationReport getApplicationReport()":{"name":"getApplicationReport","returnType
 ":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse newInstance(org.apache.hadoop.yarn.api.records.ApplicationReport)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse","args":["org.apache.hadoop.yarn.api.records.ApplicationReport"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest","methods":{"void setContainerIds(java.util.List)":{"name":"setContainerIds","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest","args":["java.util.List"],"exceptions":[]},"java.util.List getContainerIds()":{"name":"getContainerIds"
 ,"returnType":"java.util.List","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","methods":{"void setStartRange(long, long) throws java.lang.IllegalArgumentException":{"name":"setStartRange","returnType":"void","args":["long","long"],"exceptions":["java.lang.IllegalArgumentException"]},"java.util.Set getApplicationTags()":{"name":"getApplicationTags","returnType":"java.util.Set","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope getScope()":{"name":"getScope","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope","args":[],"exceptions":[]},"void setScope(org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope)":{"name":"setScope","returnType":"void","args":["org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope"],"exceptions":[]},"org.apache.hadoop.yarn.api.protoco
 lrecords.GetApplicationsRequest newInstance(org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance(org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope, java.util.Set, java.util.Set, java.util.Set, java.util.Set, java.util.EnumSet, org.apache.commons.lang.math.LongRange, org.apache.commons.lang.math.LongRange, java.lang.Long)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope","java.util.Set","java.util.Set","java.util.Set","java.util.Set","java.util.EnumSet","org.apache.commons.lang.math.LongRange","org.apache.commons.lang.math.LongRange","java.lang.Lo
 ng"],"exceptions":[]},"long getLimit()":{"name":"getLimit","returnType":"long","args":[],"exceptions":[]},"java.util.EnumSet getApplicationStates()":{"name":"getApplicationStates","returnType":"java.util.EnumSet","args":[],"exceptions":[]},"void setFinishRange(org.apache.commons.lang.math.LongRange)":{"name":"setFinishRange","returnType":"void","args":["org.apache.commons.lang.math.LongRange"],"exceptions":[]},"void setUsers(java.util.Set)":{"name":"setUsers","returnType":"void","args":["java.util.Set"],"exceptions":[]},"org.apache.commons.lang.math.LongRange getFinishRange()":{"name":"getFinishRange","returnType":"org.apache.commons.lang.math.LongRange","args":[],"exceptions":[]},"void setApplicationTags(java.util.Set)":{"name":"setApplicationTags","returnType":"void","args":["java.util.Set"],"exceptions":[]},"void setApplicationStates(java.util.EnumSet)":{"name":"setApplicationStates","returnType":"void","args":["java.util.EnumSet"],"exceptions":[]},"org.apache.hadoop.yarn.api.pro
 tocolrecords.GetApplicationsRequest newInstance(java.util.EnumSet)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["java.util.EnumSet"],"exceptions":[]},"java.util.Set getQueues()":{"name":"getQueues","returnType":"java.util.Set","args":[],"exceptions":[]},"java.util.Set getUsers()":{"name":"getUsers","returnType":"java.util.Set","args":[],"exceptions":[]},"void setLimit(long)":{"name":"setLimit","returnType":"void","args":["long"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance(java.util.Set, java.util.EnumSet)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["java.util.Set","java.util.EnumSet"],"exceptions":[]},"void setApplicationStates(java.util.Set)":{"name":"setApplicationStates","returnType":"void","args":["java.util.Set"],"exceptions":[]},"org.apache.commons.lang.math.LongRange getStartRange()":{"name":
 "getStartRange","returnType":"org.apache.commons.lang.math.LongRange","args":[],"exceptions":[]},"void setApplicationTypes(java.util.Set)":{"name":"setApplicationTypes","returnType":"void","args":["java.util.Set"],"exceptions":[]},"void setQueues(java.util.Set)":{"name":"setQueues","returnType":"void","args":["java.util.Set"],"exceptions":[]},"void setFinishRange(long, long)":{"name":"setFinishRange","returnType":"void","args":["long","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance(java.util.Set)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":["java.util.Set"],"exceptions":[]},"void setStartRange(org.apache.commons.lang.math.LongRange)":{"name":"setStartRange","returnType":"void","args":["org.apache.commons.lang.math.LongRange"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest newInstance()":{"name":"newInstance","returnType":"org
 .apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest","args":[],"exceptions":[]},"java.util.Set getApplicationTypes()":{"name":"getApplicationTypes","returnType":"java.util.Set","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.LocalResourceType":{"name":"org.apache.hadoop.yarn.api.records.LocalResourceType","methods":{"[Lorg.apache.hadoop.yarn.api.records.LocalResourceType; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.LocalResourceType;","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.LocalResourceType valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.LocalResourceType","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport":{"name":"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport","methods":{"long getVcoreSeconds()":{"name":"getVcoreSeconds","returnType":"long","args":[],"exceptions":[]},"i
 nt getNumUsedContainers()":{"name":"getNumUsedContainers","returnType":"int","args":[],"exceptions":[]},"long getMemorySeconds()":{"name":"getMemorySeconds","returnType":"long","args":[],"exceptions":[]},"void setMemorySeconds(long)":{"name":"setMemorySeconds","returnType":"void","args":["long"],"exceptions":[]},"void setUsedResources(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setUsedResources","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void setNeededResources(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setNeededResources","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getReservedResources()":{"name":"getReservedResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getNeededResources()":{"name":"getNeededResources","returnType":"org.apa
 che.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setNumUsedContainers(int)":{"name":"setNumUsedContainers","returnType":"void","args":["int"],"exceptions":[]},"void setNumReservedContainers(int)":{"name":"setNumReservedContainers","returnType":"void","args":["int"],"exceptions":[]},"void setVcoreSeconds(long)":{"name":"setVcoreSeconds","returnType":"void","args":["long"],"exceptions":[]},"int getNumReservedContainers()":{"name":"getNumReservedContainers","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport newInstance(int, int, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource, long, long)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport","args":["int","int","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop
 .yarn.api.records.Resource","long","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getUsedResources()":{"name":"getUsedResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setReservedResources(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setReservedResources","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest","methods":{"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest newInstance(java.lang.String, boolean, boolean, boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest","args":["java.lang.String","boolean","boolean","boolean"],"excep
 tions":[]},"boolean getRecursive()":{"name":"getRecursive","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getQueueName()":{"name":"getQueueName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setIncludeChildQueues(boolean)":{"name":"setIncludeChildQueues","returnType":"void","args":["boolean"],"exceptions":[]},"boolean getIncludeApplications()":{"name":"getIncludeApplications","returnType":"boolean","args":[],"exceptions":[]},"boolean getIncludeChildQueues()":{"name":"getIncludeChildQueues","returnType":"boolean","args":[],"exceptions":[]},"void setRecursive(boolean)":{"name":"setRecursive","returnType":"void","args":["boolean"],"exceptions":[]},"void setIncludeApplications(boolean)":{"name":"setIncludeApplications","returnType":"void","args":["boolean"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","methods":{"void setIncreasedContainers(ja
 va.util.List)":{"name":"setIncreasedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setDecreasedContainers(java.util.List)":{"name":"setDecreasedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse newInstance(int, java.util.List, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.AMCommand, int, org.apache.hadoop.yarn.api.records.PreemptionMessage, java.util.List, java.util.List, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["int","java.util.List","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.AMCommand","int","org.apache.hadoop.yarn.api.records.PreemptionMessage","java.util.List","java.util.List","java.util.List"],"exceptions":[]},"void setUpdatedNodes(java.util.Li
 st)":{"name":"setUpdatedNodes","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setResponseId(int)":{"name":"setResponseId","returnType":"void","args":["int"],"exceptions":[]},"java.util.List getNMTokens()":{"name":"getNMTokens","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.List getUpdatedNodes()":{"name":"getUpdatedNodes","returnType":"java.util.List","args":[],"exceptions":[]},"java.util.List getIncreasedContainers()":{"name":"getIncreasedContainers","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.AMCommand getAMCommand()":{"name":"getAMCommand","returnType":"org.apache.hadoop.yarn.api.records.AMCommand","args":[],"exceptions":[]},"void setNMTokens(java.util.List)":{"name":"setNMTokens","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getAMRMToken()":{"name":"getAMRMToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"e
 xceptions":[]},"void setAMCommand(org.apache.hadoop.yarn.api.records.AMCommand)":{"name":"setAMCommand","returnType":"void","args":["org.apache.hadoop.yarn.api.records.AMCommand"],"exceptions":[]},"void setAllocatedContainers(java.util.List)":{"name":"setAllocatedContainers","returnType":"void","args":["java.util.List"],"exceptions":[]},"int getNumClusterNodes()":{"name":"getNumClusterNodes","returnType":"int","args":[],"exceptions":[]},"void setNumClusterNodes(int)":{"name":"setNumClusterNodes","returnType":"void","args":["int"],"exceptions":[]},"void setCompletedContainersStatuses(java.util.List)":{"name":"setCompletedContainersStatuses","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setAMRMToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setAMRMToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.PreemptionMessage getPreemptionMessage()":{"name":"getPreemptionMessage","re
 turnType":"org.apache.hadoop.yarn.api.records.PreemptionMessage","args":[],"exceptions":[]},"java.util.List getCompletedContainersStatuses()":{"name":"getCompletedContainersStatuses","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getAvailableResources()":{"name":"getAvailableResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setAvailableResources(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setAvailableResources","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse newInstance(int, java.util.List, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.AMCommand, int, org.apache.hadoop.yarn.api.records.PreemptionMessage, java.util.List, org.apache.hadoop.yarn.api.records.Token, java.util.List, java.util.List)":{"na
 me":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["int","java.util.List","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.AMCommand","int","org.apache.hadoop.yarn.api.records.PreemptionMessage","java.util.List","org.apache.hadoop.yarn.api.records.Token","java.util.List","java.util.List"],"exceptions":[]},"void setPreemptionMessage(org.apache.hadoop.yarn.api.records.PreemptionMessage)":{"name":"setPreemptionMessage","returnType":"void","args":["org.apache.hadoop.yarn.api.records.PreemptionMessage"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse newInstance(int, java.util.List, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.AMCommand, int, org.apache.hadoop.yarn.api.records.PreemptionMessage, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.pro
 tocolrecords.AllocateResponse","args":["int","java.util.List","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.AMCommand","int","org.apache.hadoop.yarn.api.records.PreemptionMessage","java.util.List"],"exceptions":[]},"java.util.List getAllocatedContainers()":{"name":"getAllocatedContainers","returnType":"java.util.List","args":[],"exceptions":[]},"int getResponseId()":{"name":"getResponseId","returnType":"int","args":[],"exceptions":[]},"java.util.List getDecreasedContainers()":{"name":"getDecreasedContainers","returnType":"java.util.List","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.LocalResourceVisibility":{"name":"org.apache.hadoop.yarn.api.records.LocalResourceVisibility","methods":{"org.apache.hadoop.yarn.api.records.LocalResourceVisibility valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.LocalResourceVisibility","args":["java.lang.String"],"excep
 tions":[]},"[Lorg.apache.hadoop.yarn.api.records.LocalResourceVisibility; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.LocalResourceVisibility;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.QueueUserACLInfo":{"name":"org.apache.hadoop.yarn.api.records.QueueUserACLInfo","methods":{"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setUserAcls(java.util.List)":{"name":"setUserAcls","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.lang.String getQueueName()":{"name":"getQueueName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.List getUserAcls()":{"name":"getUserAcls","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.QueueUserACLInfo newInstance(java.lang.String, java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.QueueUserACLInfo","a
 rgs":["java.lang.String","java.util.List"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest","methods":{"void setRenewer(java.lang.String)":{"name":"setRenewer","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest newInstance(java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest","args":["java.lang.String"],"exceptions":[]},"java.lang.String getRenewer()":{"name":"getRenewer","returnType":"java.lang.String","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ContainerState":{"name":"org.apache.hadoop.yarn.api.records.ContainerState","methods":{"org.apache.hadoop.yarn.api.records.ContainerState valueOf(java.lang.String)":{"name":"valueOf","returnType":"org.apache.hadoop.yarn.api.records.ContainerState","a
 rgs":["java.lang.String"],"exceptions":[]},"[Lorg.apache.hadoop.yarn.api.records.ContainerState; values()":{"name":"values","returnType":"[Lorg.apache.hadoop.yarn.api.records.ContainerState;","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest newInstance()":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.ContainerManagementProtocol":{"name":"org.apache.hadoop.yarn.api.ContainerManagementProtocol","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse getContainerStatuses(org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainerSt
 atuses","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse startContainers(org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"startContainers","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse stopContainers(org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name
 ":"stopContainers","returnType":"org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.exceptions.YarnException":{"name":"org.apache.hadoop.yarn.exceptions.YarnException","methods":{}},"org.apache.hadoop.yarn.api.records.QueueInfo":{"name":"org.apache.hadoop.yarn.api.records.QueueInfo","methods":{"void setQueueName(java.lang.String)":{"name":"setQueueName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void setCurrentCapacity(float)":{"name":"setCurrentCapacity","returnType":"void","args":["float"],"exceptions":[]},"void setCapacity(float)":{"name":"setCapacity","returnType":"void","args":["float"],"exceptions":[]},"java.lang.String getQueueName()":{"name":"getQueueName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.List getChildQueues()":{"
 name":"getChildQueues","returnType":"java.util.List","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.QueueInfo newInstance(java.lang.String, float, float, float, java.util.List, java.util.List, org.apache.hadoop.yarn.api.records.QueueState, java.util.Set, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.QueueInfo","args":["java.lang.String","float","float","float","java.util.List","java.util.List","org.apache.hadoop.yarn.api.records.QueueState","java.util.Set","java.lang.String"],"exceptions":[]},"void setDefaultNodeLabelExpression(java.lang.String)":{"name":"setDefaultNodeLabelExpression","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.util.List getApplications()":{"name":"getApplications","returnType":"java.util.List","args":[],"exceptions":[]},"float getCapacity()":{"name":"getCapacity","returnType":"float","args":[],"exceptions":[]},"float getCurrentCapacity()":{"name":"getCurrentCapacity","returnType
 ":"float","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.QueueState getQueueState()":{"name":"getQueueState","returnType":"org.apache.hadoop.yarn.api.records.QueueState","args":[],"exceptions":[]},"void setChildQueues(java.util.List)":{"name":"setChildQueues","returnType":"void","args":["java.util.List"],"exceptions":[]},"void setApplications(java.util.List)":{"name":"setApplications","returnType":"void","args":["java.util.List"],"exceptions":[]},"java.lang.String getDefaultNodeLabelExpression()":{"name":"getDefaultNodeLabelExpression","returnType":"java.lang.String","args":[],"exceptions":[]},"void setMaximumCapacity(float)":{"name":"setMaximumCapacity","returnType":"void","args":["float"],"exceptions":[]},"void setQueueState(org.apache.hadoop.yarn.api.records.QueueState)":{"name":"setQueueState","returnType":"void","args":["org.apache.hadoop.yarn.api.records.QueueState"],"exceptions":[]},"void setAccessibleNodeLabels(java.util.Set)":{"name":"setAccessibleNodeLabels
 ","returnType":"void","args":["java.util.Set"],"exceptions":[]},"float getMaximumCapacity()":{"name":"getMaximumCapacity","returnType":"float","args":[],"exceptions":[]},"java.util.Set getAccessibleNodeLabels()":{"name":"getAccessibleNodeLabels","returnType":"java.util.Set","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse","methods":{"void setRMDelegationToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setRMDelegationToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse newInstance(org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.To
 ken getRMDelegationToken()":{"name":"getRMDelegationToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.NodeId":{"name":"org.apache.hadoop.yarn.api.records.NodeId","methods":{"int compareTo(org.apache.hadoop.yarn.api.records.NodeId)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeId newInstance(java.lang.String, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.NodeId","args":["java.lang.String","int"],"exceptions":[]},"java.lang.String getHost()":{"name":"getHost","returnType":"java.lang.String","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int",
 "args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"int getPort()":{"name":"getPort","returnType":"int","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest","methods":{"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest newInstance(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest","args":["org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext"],"exceptions":[]},"void setApplicationSubmissionContext(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext)":{"name":"setApplicationSubmissionContext","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext"],"excep
 tions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext getApplicationSubmissionContext()":{"name":"getApplicationSubmissionContext","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse":{"name":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse newInstance(boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse","args":["boolean"],"exceptions":[]},"void setIsKillCompleted(boolean)":{"name":"setIsKillCompleted","returnType":"void","args":["boolean"],"exceptions":[]},"boolean getIsKillCompleted()":{"name":"getIsKillCompleted","returnType":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.ApplicationClientProtocol":{"name":"org.apache.hadoop.yarn.api.ApplicationClientProtocol"
 ,"methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse getNewApplication(org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getNewApplication","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse getClusterNodes(org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterNodes","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnExcept
 ion","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse updateReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"updateReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesResponse getLabelsToNodes(org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getLabelsToNodes","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesRequest"],"exceptions":["org.apache.hadoop.yar
 n.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesResponse moveApplicationAcrossQueues(org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"moveApplicationAcrossQueues","returnType":"org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse deleteReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"deleteReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse","args":["org.apache.hadoop
 .yarn.api.protocolrecords.ReservationDeleteRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse getQueueUserAcls(org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueUserAcls","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse submitApplication(org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"submitApplication","returnType":"org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicat
 ionResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse forceKillApplication(org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"forceKillApplication","returnType":"org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsResponse getNodeToLabels(org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getNodeToLabels","returnType":"org.apache.hadoop.yarn.api.protocolr
 ecords.GetNodesToLabelsResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse submitReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"submitReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse getQueueInfo(org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueInfo","returnType":"org.apache.ha
 doop.yarn.api.protocolrecords.GetQueueInfoResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse getClusterMetrics(org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterMetrics","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsResponse getClusterNodeLabels(org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterNodeLabe
 ls","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest","methods":{"void setContainerIds(java.util.List)":{"name":"setContainerIds","returnType":"void","args":["java.util.List"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest newInstance(java.util.List)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest","args":["java.util.List"],"exceptions":[]},"java.util.List getContainerIds()":{"name":"getContainerIds","returnType":"java.util.List","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse
 ":{"name":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse newInstance(org.apache.hadoop.yarn.api.records.ApplicationId, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.records.Resource)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","args":["org.apache.hadoop.yarn.api.records.ApplicationId","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationId getApplicationId()":{"name":"getApplicationId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":[],"exceptions":[]},"void setApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId)":{"name":"setApplicationId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":[]},"org.apache.hadoop.ya
 rn.api.records.Resource getMaximumResourceCapability()":{"name":"getMaximumResourceCapability","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void setMaximumResourceCapability(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setMaximumResourceCapability","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ContainerId":{"name":"org.apache.hadoop.yarn.api.records.ContainerId","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId newInstance(org.apache.hadoop.yarn.api.records.ApplicationAttemptId, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ContainerId","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId","int"],"exceptions":[]
 },"int getId()":{"name":"getId","returnType":"int","args":[],"exceptions":[]},"long getContainerId()":{"name":"getContainerId","returnType":"long","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId newContainerId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId, long)":{"name":"newContainerId","returnType":"org.apache.hadoop.yarn.api.records.ContainerId","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId","long"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptId getApplicationAttemptId()":{"name":"getApplicationAttemptId","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptId","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId fromString(java.lang.String)":{"name":"fromString","returnType":"org.apache.
 hadoop.yarn.api.records.ContainerId","args":["java.lang.String"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.Container":{"name":"org.apache.hadoop.yarn.api.records.Container","methods":{"org.apache.hadoop.yarn.api.records.Priority getPriority()":{"name":"getPriority","returnType":"org.apache.hadoop.yarn.api.records.Priority","args":[],"exceptions":[]},"java.lang.String getNodeHttpAddress()":{"name":"getNodeHttpAddress","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.NodeId getNodeId()":{"name":"getNodeId","returnType":"org.apache.hadoop.yarn.api.records.NodeId","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId getId()":{"name":"getId","returnType":"org.apache.hadoop.y
 arn.api.records.ContainerId","args":[],"exceptions":[]},"void setContainerToken(org.apache.hadoop.yarn.api.records.Token)":{"name":"setContainerToken","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"void setNodeId(org.apache.hadoop.yarn.api.records.NodeId)":{"name":"setNodeId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Token getContainerToken()":{"name":"getContainerToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getResource()":{"name":"getResource","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Container newInstance(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, org.apache.hadoop.yarn.api.record
 s.Priority, org.apache.hadoop.yarn.api.records.Token)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.Container","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","org.apache.hadoop.yarn.api.records.Priority","org.apache.hadoop.yarn.api.records.Token"],"exceptions":[]},"void setPriority(org.apache.hadoop.yarn.api.records.Priority)":{"name":"setPriority","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Priority"],"exceptions":[]},"void setResource(org.apache.hadoop.yarn.api.records.Resource)":{"name":"setResource","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void setId(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"setId","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"void setNodeHttpAddress(java.lang.String)":{"name":"setNod
 eHttpAddress","returnType":"void","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ResourceRequest":{"name":"org.apache.hadoop.yarn.api.records.ResourceRequest","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceRequest newInstance(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, int, boolean, java.lang.String)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ResourceRequest","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","int","boolean","java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Priority getPriority()":{"name":"getPriority","returnType":"org.apache.hadoop.yarn.api.records.Priority","args":[],"exceptions":[]},"void setCapability(org.apache.hadoop.yarn.api.records.Resource)":{"name":"s
 etCapability","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"int getNumContainers()":{"name":"getNumContainers","returnType":"int","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceRequest newInstance(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, int)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ResourceRequest","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","int"],"exceptions":[]},"void setRelaxLocality(boolean)":{"name":"setRelaxLocality","returnType":"void","args":["boolean"],"exceptions":[]},"void setResourceName(java.lang.String)":{"name":"setResourceName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.Resource getCapability()":{"name":"getCapability","returnType":"org.apache.hadoop.yarn.api.records.R
 esource","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ResourceRequest newInstance(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource, int, boolean)":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ResourceRequest","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource","int","boolean"],"exceptions":[]},"void setNodeLabelExpression(java.lang.String)":{"name":"setNodeLabelExpression","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getNodeLabelExpression()":{"name":"getNodeLabelExpression","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean getR
 elaxLocality()":{"name":"getRelaxLocality","returnType":"boolean","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.yarn.api.records.ResourceRequest)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.yarn.api.records.ResourceRequest"],"exceptions":[]},"java.lang.String getResourceName()":{"name":"getResourceName","returnType":"java.lang.String","args":[],"exceptions":[]},"void setPriority(org.apache.hadoop.yarn.api.records.Priority)":{"name":"setPriority","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Priority"],"exceptions":[]},"void setNumContainers(int)":{"name":"setNumContainers","returnType":"void","args":["int"],"exceptions":[]},"boolean isAnyLocation(java.lang.String)":{"name":"isAnyLocation","returnType":"boolean","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.yarn.api.records.ContainerStatus":{"name":"org.apache.hadoop.yarn.api.records.ContainerStatus","methods":{"org.apache.hadoop.yarn.api.records.ContainerStat
 e getState()":{"name":"getState","returnType":"org.apache.hadoop.yarn.api.records.ContainerState","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerId getContainerId()":{"name":"getContainerId","returnType":"org.apache.hadoop.yarn.api.records.ContainerId","args":[],"exceptions":[]},"int getExitStatus()":{"name":"getExitStatus","returnType":"int","args":[],"exceptions":[]},"void setExitStatus(int)":{"name":"setExitStatus","returnType":"void","args":["int"],"exceptions":[]},"void setState(org.apache.hadoop.yarn.api.records.ContainerState)":{"name":"setState","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerState"],"exceptions":[]},"void setDiagnostics(java.lang.String)":{"name":"setDiagnostics","returnType":"void","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerStatus newInstance(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.ContainerState, java.lang.String, int
 )":{"name":"newInstance","returnType":"org.apache.hadoop.yarn.api.records.ContainerStatus","args":["org.apache.hadoop.yarn.api.records.ContainerId","o

<TRUNCATED>

[45/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
new file mode 100644
index 0000000..0ea49ce
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestHCatalog.java
@@ -0,0 +1,158 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hive;
+
+import org.apache.commons.exec.CommandLine;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
+import org.apache.hive.hcatalog.data.schema.HCatSchema;
+import org.apache.thrift.TException;
+import org.junit.Assert;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Random;
+
+
+public class TestHCatalog {
+  private static final String JOBJAR = "odpi.test.hive.hcat.job.jar";
+  private static final String HCATCORE = "odpi.test.hive.hcat.core.jar";
+
+  private static final Log LOG = LogFactory.getLog(TestHCatalog.class.getName());
+
+  private static IMetaStoreClient client = null;
+  private static HiveConf conf;
+  private static HCatSchema inputSchema;
+  private static HCatSchema outputSchema;
+
+  private Random rand;
+
+  @BeforeClass
+  public static void connect() throws MetaException {
+    if (JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog ")) {
+      String hiveConfDir = JdbcConnector.getProperty(JdbcConnector.HIVE_CONF_DIR,
+          "Hive conf directory ");
+      String hadoopConfDir = JdbcConnector.getProperty(JdbcConnector.HADOOP_CONF_DIR,
+          "Hadoop conf directory ");
+      conf = new HiveConf();
+      String fileSep = System.getProperty("file.separator");
+      conf.addResource(new Path(hadoopConfDir + fileSep + "core-site.xml"));
+      conf.addResource(new Path(hadoopConfDir + fileSep + "hdfs-site.xml"));
+      conf.addResource(new Path(hadoopConfDir + fileSep + "yarn-site.xml"));
+      conf.addResource(new Path(hadoopConfDir + fileSep + "mapred-site.xml"));
+      conf.addResource(new Path(hiveConfDir + fileSep + "hive-site.xml"));
+      client = new HiveMetaStoreClient(conf);
+
+    }
+  }
+
+  @Before
+  public void checkIfActive() {
+    Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_HCATALOG, "Test HCatalog "));
+    rand = new Random();
+  }
+
+  @Test
+  public void hcatInputFormatOutputFormat() throws TException, IOException, ClassNotFoundException,
+      InterruptedException, URISyntaxException {
+    // Create a table to write to
+    final String inputTable = "odpi_hcat_input_table_" + rand.nextInt(Integer.MAX_VALUE);
+    SerDeInfo serde = new SerDeInfo("default_serde",
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
+    FieldSchema schema = new FieldSchema("line", "string", "");
+    inputSchema = new HCatSchema(Collections.singletonList(new HCatFieldSchema(schema.getName(),
+        HCatFieldSchema.Type.STRING, schema.getComment())));
+    StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(schema), null,
+        "org.apache.hadoop.mapred.TextInputFormat",
+        "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null,
+        new HashMap<String, String>());
+    Table table = new Table(inputTable, "default", "me", 0, 0, 0, sd, null,
+        new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+    client.createTable(table);
+
+    final String outputTable = "odpi_hcat_output_table_" + rand.nextInt(Integer.MAX_VALUE);
+    sd = new StorageDescriptor(Arrays.asList(
+          new FieldSchema("word", "string", ""),
+          new FieldSchema("count", "int", "")),
+        null, "org.apache.hadoop.mapred.TextInputFormat",
+        "org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat", false, 0, serde, null, null,
+        new HashMap<String, String>());
+    table = new Table(outputTable, "default", "me", 0, 0, 0, sd, null,
+        new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+    client.createTable(table);
+    outputSchema = new HCatSchema(Arrays.asList(
+        new HCatFieldSchema("word", HCatFieldSchema.Type.STRING, ""),
+        new HCatFieldSchema("count", HCatFieldSchema.Type.INT, "")));
+
+    // LATER Could I use HCatWriter here and the reader to read it?
+    // Write some stuff into a file in the location of the table
+    table = client.getTable("default", inputTable);
+    String inputFile = table.getSd().getLocation() + "/input";
+    Path inputPath = new Path(inputFile);
+    FileSystem fs = FileSystem.get(conf);
+    FSDataOutputStream out = fs.create(inputPath);
+    out.writeChars("Mary had a little lamb\n");
+    out.writeChars("its fleece was white as snow\n");
+    out.writeChars("and everywhere that Mary went\n");
+    out.writeChars("the lamb was sure to go\n");
+    out.close();
+
+    Map<String, String> env = new HashMap<>();
+    env.put("HADOOP_CLASSPATH", System.getProperty(HCATCORE, ""));
+    Map<String, String> results = HiveHelper.execCommand(new CommandLine("hive")
+        .addArgument("--service")
+        .addArgument("jar")
+        .addArgument(System.getProperty(JOBJAR))
+        .addArgument(HCatalogMR.class.getName())
+        .addArgument("-it")
+        .addArgument(inputTable)
+        .addArgument("-ot")
+        .addArgument(outputTable)
+        .addArgument("-is")
+        .addArgument(inputSchema.getSchemaAsTypeString())
+        .addArgument("-os")
+        .addArgument(outputSchema.getSchemaAsTypeString()), env);
+    LOG.info(results.toString());
+    Assert.assertEquals("HCat job failed", 0, Integer.parseInt(results.get("exitValue")));
+
+    client.dropTable("default", inputTable);
+    client.dropTable("default", outputTable);
+  }
+
+}
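
(The HCatalogMR driver invoked by the test above ships as a separate file in this patch series and is not reproduced here. As a rough orientation only, the sketch below shows the HCatInputFormat/HCatOutputFormat wiring such a driver needs; the class name, the positional argument handling, and the hard-coded "default" database are assumptions for illustration, not the actual HCatalogMR code.)

package org.odpi.specs.runtime.hive;

import java.io.IOException;
import java.util.Arrays;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hive.hcatalog.data.DefaultHCatRecord;
import org.apache.hive.hcatalog.data.HCatRecord;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;
import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;

// Hypothetical word-count driver reading from and writing to HCatalog-managed tables.
public class HCatWordCountSketch {

  public static class WordMapper
      extends Mapper<WritableComparable, HCatRecord, Text, IntWritable> {
    private static final IntWritable ONE = new IntWritable(1);

    @Override
    protected void map(WritableComparable key, HCatRecord value, Context context)
        throws IOException, InterruptedException {
      // Column 0 is the single "line" string column of the input table created by the test.
      StringTokenizer tokens = new StringTokenizer(value.get(0).toString());
      while (tokens.hasMoreTokens()) {
        context.write(new Text(tokens.nextToken()), ONE);
      }
    }
  }

  public static class WordReducer
      extends Reducer<Text, IntWritable, WritableComparable, HCatRecord> {
    @Override
    protected void reduce(Text word, Iterable<IntWritable> counts, Context context)
        throws IOException, InterruptedException {
      int sum = 0;
      for (IntWritable c : counts) sum += c.get();
      // The record layout matches the (word string, count int) output table from the test.
      HCatRecord record = new DefaultHCatRecord(2);
      record.set(0, word.toString());
      record.set(1, sum);
      context.write(null, record);
    }
  }

  public static void main(String[] args) throws Exception {
    // Assumed positional arguments: <input table> <output table>, both in the "default" db.
    String inputTable = args[0];
    String outputTable = args[1];

    Job job = Job.getInstance(new Configuration(), "hcat-wordcount-sketch");
    job.setJarByClass(HCatWordCountSketch.class);

    // Read rows of the input table through HCatalog.
    HCatInputFormat.setInput(job, "default", inputTable);
    job.setInputFormatClass(HCatInputFormat.class);

    job.setMapperClass(WordMapper.class);
    job.setReducerClass(WordReducer.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(IntWritable.class);
    job.setOutputKeyClass(WritableComparable.class);
    job.setOutputValueClass(DefaultHCatRecord.class);

    // Write result records into the output table, declaring their schema explicitly.
    HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", outputTable, null));
    HCatSchema outputSchema = new HCatSchema(Arrays.asList(
        new HCatFieldSchema("word", HCatFieldSchema.Type.STRING, ""),
        new HCatFieldSchema("count", HCatFieldSchema.Type.INT, "")));
    HCatOutputFormat.setSchema(job, outputSchema);
    job.setOutputFormatClass(HCatOutputFormat.class);

    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}

The test launches the real driver through `hive --service jar`, which keeps the Hive and HCatalog jars (plus the hcat core jar exported via HADOOP_CLASSPATH above) on the job's classpath.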

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
new file mode 100644
index 0000000..154fd9c
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestJdbc.java
@@ -0,0 +1,545 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hive;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.junit.Test;
+
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.SQLWarning;
+import java.sql.Statement;
+import java.sql.Types;
+
+public class TestJdbc extends JdbcConnector {
+  private static final Log LOG = LogFactory.getLog(TestJdbc.class.getName());
+
+  /**
+   * Test simple non-statement related calls.  setSchema is tested elsewhere because there's work
+   * to do for that one.  Similarly with getMetadata.
+   * @throws SQLException
+   */
+  @Test
+  public void nonStatementCalls() throws SQLException {
+    conn.clearWarnings();
+
+    boolean isAutoCommit = conn.getAutoCommit();
+    LOG.debug("Auto commit is " + isAutoCommit);
+
+    String catalog = conn.getCatalog();
+    LOG.debug("Catalog is " + catalog);
+
+    String schema = conn.getSchema();
+    LOG.debug("Schema is " + schema);
+
+    int txnIsolation = conn.getTransactionIsolation();
+    LOG.debug("Transaction Isolation is " + txnIsolation);
+
+    SQLWarning warning = conn.getWarnings();
+    while (warning != null) {
+      LOG.debug("Found a warning: " + warning.getMessage());
+      warning = warning.getNextWarning();
+    }
+
+    boolean closed = conn.isClosed();
+    LOG.debug("Is closed? " + closed);
+
+    boolean readOnly = conn.isReadOnly();
+    LOG.debug("Is read only?" + readOnly);
+
+    // Hive doesn't support catalogs, so setting this to whatever should be fine.  If non-Hive
+    // systems try to pass this test, setting it to an invalid catalog name may cause issues,
+    // so we may need to make this value configurable.
+    conn.setCatalog("fred");
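+    // A possible way to make the value configurable (sketch only; the property name below is
+    // hypothetical and not defined by this patch):
+    String catalogName = System.getProperty("odpi.test.hive.jdbc.catalog", "fred");
+    conn.setCatalog(catalogName);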
+  }
+
+  /**
+   * Test simple DatabaseMetaData calls.  getColumns is tested elsewhere, as we need to call
+   * that on a valid table.  Same with getFunctions.
+   * @throws SQLException
+   */
+  @Test
+  public void databaseMetaDataCalls() throws SQLException {
+    DatabaseMetaData md = conn.getMetaData();
+
+    boolean boolrc = md.allTablesAreSelectable();
+    LOG.debug("All tables are selectable? " + boolrc);
+
+    String strrc = md.getCatalogSeparator();
+    LOG.debug("Catalog separator " + strrc);
+
+    strrc = md.getCatalogTerm();
+    LOG.debug("Catalog term " + strrc);
+
+    ResultSet rs = md.getCatalogs();
+    while (rs.next()) {
+      strrc = rs.getString(1);
+      LOG.debug("Found catalog " + strrc);
+    }
+
+    Connection c = md.getConnection();
+
+    int intrc = md.getDatabaseMajorVersion();
+    LOG.debug("DB major version is " + intrc);
+
+    intrc = md.getDatabaseMinorVersion();
+    LOG.debug("DB minor version is " + intrc);
+
+    strrc = md.getDatabaseProductName();
+    LOG.debug("DB product name is " + strrc);
+
+    strrc = md.getDatabaseProductVersion();
+    LOG.debug("DB product version is " + strrc);
+
+    intrc = md.getDefaultTransactionIsolation();
+    LOG.debug("Default transaction isolation is " + intrc);
+
+    intrc = md.getDriverMajorVersion();
+    LOG.debug("Driver major version is " + intrc);
+
+    intrc = md.getDriverMinorVersion();
+    LOG.debug("Driver minor version is " + intrc);
+
+    strrc = md.getDriverName();
+    LOG.debug("Driver name is " + strrc);
+
+    strrc = md.getDriverVersion();
+    LOG.debug("Driver version is " + strrc);
+
+    strrc = md.getExtraNameCharacters();
+    LOG.debug("Extra name characters is " + strrc);
+
+    strrc = md.getIdentifierQuoteString();
+    LOG.debug("Identifier quote string is " + strrc);
+
+    // In Hive 1.2 this always returns an empty RS
+    rs = md.getImportedKeys("a", "b", "d");
+
+    // In Hive 1.2 this always returns an empty RS
+    rs = md.getIndexInfo("a", "b", "d", true, true);
+
+    intrc = md.getJDBCMajorVersion();
+    LOG.debug("JDBC major version is " + intrc);
+
+    intrc = md.getJDBCMinorVersion();
+    LOG.debug("JDBC minor version is " + intrc);
+
+    intrc = md.getMaxColumnNameLength();
+    LOG.debug("Maximum column name length is " + intrc);
+
+    strrc = md.getNumericFunctions();
+    LOG.debug("Numeric functions are " + strrc);
+
+    // In Hive 1.2 this always returns an empty RS
+    rs = md.getPrimaryKeys("a", "b", "d");
+
+    // In Hive 1.2 this always returns an empty RS
+    rs = md.getProcedureColumns("a", "b", "d", "e");
+
+    strrc = md.getProcedureTerm();
+    LOG.debug("Procedures are called " + strrc);
+
+    // In Hive 1.2 this always returns an empty RS
+    rs = md.getProcedures("a", "b", "d");
+
+    strrc = md.getSchemaTerm();
+    LOG.debug("Schemas are called " + strrc);
+
+    rs = md.getSchemas();
+    while (rs.next()) {
+      strrc = rs.getString(1);
+      LOG.debug("Found schema " + strrc);
+    }
+
+    strrc = md.getSearchStringEscape();
+    LOG.debug("Search string escape is " + strrc);
+
+    strrc = md.getStringFunctions();
+    LOG.debug("String functions are " + strrc);
+
+    strrc = md.getSystemFunctions();
+    LOG.debug("System functions are " + strrc);
+
+    rs = md.getTableTypes();
+    while (rs.next()) {
+      strrc = rs.getString(1);
+      LOG.debug("Found table type " + strrc);
+    }
+
+    strrc = md.getTimeDateFunctions();
+    LOG.debug("Time/date functions are " + strrc);
+
+    rs = md.getTypeInfo();
+    while (rs.next()) {
+      strrc = rs.getString(1);
+      LOG.debug("Found type " + strrc);
+    }
+
+    // In Hive 1.2 this always returns an empty RS
+    rs = md.getUDTs("a", "b", "d", null);
+
+    boolrc = md.supportsAlterTableWithAddColumn();
+    LOG.debug("Supports alter table with add column? " + boolrc);
+
+    boolrc = md.supportsAlterTableWithDropColumn();
+    LOG.debug("Supports alter table with drop column? " + boolrc);
+
+    boolrc = md.supportsBatchUpdates();
+    LOG.debug("Supports batch updates? " + boolrc);
+
+    boolrc = md.supportsCatalogsInDataManipulation();
+    LOG.debug("Supports catalogs in data manipulation? " + boolrc);
+
+    boolrc = md.supportsCatalogsInIndexDefinitions();
+    LOG.debug("Supports catalogs in index definition? " + boolrc);
+
+    boolrc = md.supportsCatalogsInPrivilegeDefinitions();
+    LOG.debug("Supports catalogs in privilege definition? " + boolrc);
+
+    boolrc = md.supportsCatalogsInProcedureCalls();
+    LOG.debug("Supports catalogs in procedure calls? " + boolrc);
+
+    boolrc = md.supportsCatalogsInTableDefinitions();
+    LOG.debug("Supports catalogs in table definition? " + boolrc);
+
+    boolrc = md.supportsColumnAliasing();
+    LOG.debug("Supports column aliasing? " + boolrc);
+
+    boolrc = md.supportsFullOuterJoins();
+    LOG.debug("Supports full outer joins? " + boolrc);
+
+    boolrc = md.supportsGroupBy();
+    LOG.debug("Supports group by? " + boolrc);
+
+    boolrc = md.supportsLimitedOuterJoins();
+    LOG.debug("Supports limited outer joins? " + boolrc);
+
+    boolrc = md.supportsMultipleResultSets();
+    LOG.debug("Supports limited outer joins? " + boolrc);
+
+    boolrc = md.supportsNonNullableColumns();
+    LOG.debug("Supports non-nullable columns? " + boolrc);
+
+    boolrc = md.supportsOuterJoins();
+    LOG.debug("Supports outer joins? " + boolrc);
+
+    boolrc = md.supportsPositionedDelete();
+    LOG.debug("Supports positioned delete? " + boolrc);
+
+    boolrc = md.supportsPositionedUpdate();
+    LOG.debug("Supports positioned update? " + boolrc);
+
+    boolrc = md.supportsResultSetHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
+    LOG.debug("Supports result set holdability? " + boolrc);
+
+    boolrc = md.supportsResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE);
+    LOG.debug("Supports result set type? " + boolrc);
+
+    boolrc = md.supportsSavepoints();
+    LOG.debug("Supports savepoints? " + boolrc);
+
+    boolrc = md.supportsSchemasInDataManipulation();
+    LOG.debug("Supports schemas in data manipulation? " + boolrc);
+
+    boolrc = md.supportsSchemasInIndexDefinitions();
+    LOG.debug("Supports schemas in index definitions? " + boolrc);
+
+    boolrc = md.supportsSchemasInPrivilegeDefinitions();
+    LOG.debug("Supports schemas in privilege definitions? " + boolrc);
+
+    boolrc = md.supportsSchemasInProcedureCalls();
+    LOG.debug("Supports schemas in procedure calls? " + boolrc);
+
+    boolrc = md.supportsSchemasInTableDefinitions();
+    LOG.debug("Supports schemas in table definitions? " + boolrc);
+
+    boolrc = md.supportsSelectForUpdate();
+    LOG.debug("Supports select for update? " + boolrc);
+
+    boolrc = md.supportsStoredProcedures();
+    LOG.debug("Supports stored procedures? " + boolrc);
+
+    boolrc = md.supportsTransactions();
+    LOG.debug("Supports transactions? " + boolrc);
+
+    boolrc = md.supportsUnion();
+    LOG.debug("Supports union? " + boolrc);
+
+    boolrc = md.supportsUnionAll();
+    LOG.debug("Supports union all? " + boolrc);
+
+  }
+
+  @Test
+  public void setSchema() throws SQLException {
+    try (Statement stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
+        ResultSet.CONCUR_READ_ONLY)) {
+
+      final String dbName = "odpi_jdbc_test_db";
+
+      final String tableName = "odpi_jdbc_test_table";
+      stmt.execute("drop table if exists " + tableName);
+
+      stmt.execute("drop database if exists " + dbName + " cascade");
+      stmt.execute("create database " + dbName);
+
+      conn.setSchema(dbName);
+
+      DatabaseMetaData md = conn.getMetaData();
+
+      ResultSet rs = md.getSchemas(null, dbName);
+
+      while (rs.next()) {
+        String schemaName = rs.getString(2);
+        LOG.debug("Schema name is " + schemaName);
+      }
+
+      stmt.execute("create table " + tableName + " (i int, s varchar(32))");
+
+      rs = md.getTables(null, dbName, tableName, null);
+      while (rs.next()) {
+        String tName = rs.getString(3);
+        LOG.debug("Schema name is " + tName);
+      }
+
+      rs = md.getColumns(null, dbName, tableName, "i");
+      while (rs.next()) {
+        String colName = rs.getString(4);
+        LOG.debug("Schema name is " + colName);
+      }
+
+      rs = md.getFunctions(null, dbName, "foo");
+      while (rs.next()) {
+        String funcName = rs.getString(3);
+        LOG.debug("Schema name is " + funcName);
+      }
+    }
+  }
+
+  @Test
+  public void statement() throws SQLException {
+    try (Statement stmt = conn.createStatement()) {
+      stmt.cancel();
+    }
+
+    try (Statement stmt = conn.createStatement()) {
+      stmt.clearWarnings();
+
+      final String tableName = "odpi_jdbc_statement_test_table";
+
+      stmt.execute("drop table if exists " + tableName);
+      stmt.execute("create table " + tableName + " (a int, b varchar(32))");
+
+      stmt.executeUpdate("insert into " + tableName + " values (1, 'abc'), (2, 'def')");
+
+      int intrc = stmt.getUpdateCount();
+      LOG.debug("Update count is " + intrc);
+
+      ResultSet rs = stmt.executeQuery("select * from " + tableName);
+      while (rs.next()) {
+        LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2));
+      }
+
+      Connection localConn = stmt.getConnection();
+
+      intrc = stmt.getFetchDirection();
+      LOG.debug("Fetch direction is " + intrc);
+
+      intrc = stmt.getFetchSize();
+      LOG.debug("Fetch size is " + intrc);
+
+      intrc = stmt.getMaxRows();
+      LOG.debug("max rows is " + intrc);
+
+      boolean boolrc = stmt.getMoreResults();
+      LOG.debug("more results is " + boolrc);
+
+      intrc = stmt.getQueryTimeout();
+      LOG.debug("query timeout is " + intrc);
+
+      stmt.execute("select * from " + tableName);
+      rs = stmt.getResultSet();
+      while (rs.next()) {
+        LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2));
+      }
+
+      intrc = stmt.getResultSetType();
+      LOG.debug("result set type is " + intrc);
+
+      SQLWarning warning = stmt.getWarnings();
+      while (warning != null) {
+        LOG.debug("Found a warning: " + warning.getMessage());
+        warning = warning.getNextWarning();
+      }
+
+      boolrc = stmt.isClosed();
+      LOG.debug("is closed " + boolrc);
+
+      boolrc = stmt.isCloseOnCompletion();
+      LOG.debug("is close on completion " + boolrc);
+
+      boolrc = stmt.isPoolable();
+      LOG.debug("is poolable " + boolrc);
+
+      stmt.setFetchDirection(ResultSet.FETCH_FORWARD);
+      stmt.setFetchSize(500);
+      stmt.setMaxRows(500);
+    }
+  }
+
+  @Test
+  public void preparedStmtAndResultSet() throws SQLException {
+    final String tableName = "odpi_jdbc_psars_test_table";
+    try (Statement stmt = conn.createStatement()) {
+      stmt.execute("drop table if exists " + tableName);
+      stmt.execute("create table " + tableName + " (bo boolean, ti tinyint, db double, fl float, " +
+          "i int, lo bigint, sh smallint, st varchar(32))");
+    }
+
+    // NOTE Hive 1.2 theoretically supports binary, Date & Timestamp in JDBC, but I get errors when I
+    // try to put them in the query.
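+    // For reference, the kinds of setters that note refers to would look like the following
+    // (sketch only, left commented out because they fail against Hive 1.2 as described above):
+    //   ps.setDate(9, java.sql.Date.valueOf("2016-10-19"));
+    //   ps.setTimestamp(10, java.sql.Timestamp.valueOf("2016-10-19 16:30:10"));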
+    try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName +
+        " values (?, ?, ?, ?, ?, ?, ?, ?)")) {
+      ps.setBoolean(1, true);
+      ps.setByte(2, (byte)1);
+      ps.setDouble(3, 3.141592654);
+      ps.setFloat(4, 3.14f);
+      ps.setInt(5, 3);
+      ps.setLong(6, 10L);
+      ps.setShort(7, (short)20);
+      ps.setString(8, "abc");
+      ps.executeUpdate();
+    }
+
+    try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName + " (i, st) " +
+        "values(?, ?)", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) {
+      ps.setNull(1, Types.INTEGER);
+      ps.setObject(2, "mary had a little lamb");
+      ps.executeUpdate();
+      ps.setNull(1, Types.INTEGER, null);
+      ps.setString(2, "its fleece was white as snow");
+      ps.clearParameters();
+      ps.setNull(1, Types.INTEGER, null);
+      ps.setString(2, "its fleece was white as snow");
+      ps.execute();
+
+    }
+
+    try (Statement stmt = conn.createStatement()) {
+
+      ResultSet rs = stmt.executeQuery("select * from " + tableName);
+
+      ResultSetMetaData md = rs.getMetaData();
+
+      int colCnt = md.getColumnCount();
+      LOG.debug("Column count is " + colCnt);
+
+      for (int i = 1; i <= colCnt; i++) {
+        LOG.debug("Looking at column " + i);
+        String strrc = md.getColumnClassName(i);
+        LOG.debug("Column class name is " + strrc);
+
+        int intrc = md.getColumnDisplaySize(i);
+        LOG.debug("Column display size is " + intrc);
+
+        strrc = md.getColumnLabel(i);
+        LOG.debug("Column label is " + strrc);
+
+        strrc = md.getColumnName(i);
+        LOG.debug("Column name is " + strrc);
+
+        intrc = md.getColumnType(i);
+        LOG.debug("Column type is " + intrc);
+
+        strrc = md.getColumnTypeName(i);
+        LOG.debug("Column type name is " + strrc);
+
+        intrc = md.getPrecision(i);
+        LOG.debug("Precision is " + intrc);
+
+        intrc = md.getScale(i);
+        LOG.debug("Scale is " + intrc);
+
+        boolean boolrc = md.isAutoIncrement(i);
+        LOG.debug("Is auto increment? " + boolrc);
+
+        boolrc = md.isCaseSensitive(i);
+        LOG.debug("Is case sensitive? " + boolrc);
+
+        boolrc = md.isCurrency(i);
+        LOG.debug("Is currency? " + boolrc);
+
+        intrc = md.getScale(i);
+        LOG.debug("Scale is " + intrc);
+
+        intrc = md.isNullable(i);
+        LOG.debug("Is nullable? " + intrc);
+
+        boolrc = md.isReadOnly(i);
+        LOG.debug("Is read only? " + boolrc);
+
+      }
+
+      while (rs.next()) {
+        LOG.debug("bo = " + rs.getBoolean(1));
+        LOG.debug("bo = " + rs.getBoolean("bo"));
+        LOG.debug("ti = " + rs.getByte(2));
+        LOG.debug("ti = " + rs.getByte("ti"));
+        LOG.debug("db = " + rs.getDouble(3));
+        LOG.debug("db = " + rs.getDouble("db"));
+        LOG.debug("fl = " + rs.getFloat(4));
+        LOG.debug("fl = " + rs.getFloat("fl"));
+        LOG.debug("i = " + rs.getInt(5));
+        LOG.debug("i = " + rs.getInt("i"));
+        LOG.debug("lo = " + rs.getLong(6));
+        LOG.debug("lo = " + rs.getLong("lo"));
+        LOG.debug("sh = " + rs.getShort(7));
+        LOG.debug("sh = " + rs.getShort("sh"));
+        LOG.debug("st = " + rs.getString(8));
+        LOG.debug("st = " + rs.getString("st"));
+        LOG.debug("tm = " + rs.getObject(8));
+        LOG.debug("tm = " + rs.getObject("st"));
+        LOG.debug("tm was null " + rs.wasNull());
+      }
+      LOG.debug("bo is column " + rs.findColumn("bo"));
+
+      int intrc = rs.getConcurrency();
+      LOG.debug("concurrency " + intrc);
+
+      intrc = rs.getFetchDirection();
+      LOG.debug("fetch direction " + intrc);
+
+      intrc = rs.getType();
+      LOG.debug("type " + intrc);
+
+      Statement copy = rs.getStatement();
+
+      SQLWarning warning = rs.getWarnings();
+      while (warning != null) {
+        LOG.debug("Found a warning: " + warning.getMessage());
+        warning = warning.getNextWarning();
+      }
+      rs.clearWarnings();
+    }
+  }
+}
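For anyone reading the tests above outside of the harness, the sketch below shows the minimal JDBC plumbing they rely on: obtain a Connection, create a Statement, and issue Hive SQL. It is illustration only; the URL, credentials, and table name are placeholders, a reachable HiveServer2 instance is assumed, and the tests themselves obtain their connection from JdbcConnector instead.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveJdbcSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder connection details; substitute a real HiveServer2 URL and credentials.
    String url = "jdbc:hive2://localhost:10000/default";
    try (Connection conn = DriverManager.getConnection(url, "hive", "");
         Statement stmt = conn.createStatement()) {
      stmt.execute("create table if not exists odpi_jdbc_sketch (a int, b varchar(32))");
      stmt.executeUpdate("insert into odpi_jdbc_sketch values (1, 'abc')");
      try (ResultSet rs = stmt.executeQuery("select a, b from odpi_jdbc_sketch")) {
        while (rs.next()) {
          System.out.println(rs.getInt(1) + "," + rs.getString(2));
        }
      }
      stmt.execute("drop table odpi_jdbc_sketch");
    }
  }
}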

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
new file mode 100644
index 0000000..f247841
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestSql.java
@@ -0,0 +1,337 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hive;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.junit.Test;
+
+import java.sql.SQLException;
+import java.sql.Statement;
+
+// This does not test every option that Hive supports, but does try to touch the major
+// options, especially anything unique to Hive.  See each test for areas tested and not tested.
+public class TestSql extends JdbcConnector {
+  private static final Log LOG = LogFactory.getLog(TestSql.class.getName());
+
+  @Test
+  public void db() throws SQLException {
+    final String db1 = "odpi_sql_db1";
+    final String db2 = "odpi_sql_db2";
+    try (Statement stmt = conn.createStatement()) {
+      stmt.execute("drop database if exists " + db1 + " cascade");
+
+      // Simple create database
+      stmt.execute("create database " + db1);
+      stmt.execute("drop database " + db1);
+
+      stmt.execute("drop schema if exists " + db2 + " cascade");
+
+      String location = getProperty(LOCATION, "a writable directory in HDFS");
+
+      // All the bells and whistles
+      stmt.execute("create schema if not exists " + db2 + " comment 'a db' location '" + location +
+          "' with dbproperties ('a' = 'b')");
+
+      stmt.execute("alter database " + db2 + " set dbproperties ('c' = 'd')");
+
+      stmt.execute("drop database " + db2 + " restrict");
+    }
+  }
+
+  @Test
+  public void table() throws SQLException {
+    final String table1 = "odpi_sql_table1";
+    final String table2 = "odpi_sql_table2";
+    final String table3 = "odpi_sql_table3";
+    final String table4 = "odpi_sql_table4";
+    final String table5 = "odpi_sql_table5";
+
+    try (Statement stmt = conn.createStatement()) {
+      stmt.execute("drop table if exists " + table1);
+      stmt.execute("drop table if exists " + table2);
+      stmt.execute("drop table if exists " + table3);
+      stmt.execute("drop table if exists " + table4);
+      stmt.execute("drop table if exists " + table5);
+
+      String location = getProperty(LOCATION, "a writable directory in HDFS");
+      stmt.execute("create external table " + table1 + "(a int, b varchar(32)) location '" +
+          location + "'");
+
+      // With a little bit of everything, except partitions, we'll do those below
+      stmt.execute("create table if not exists " + table2 +
+          "(c1 tinyint," +
+          " c2 smallint," +
+          " c3 int comment 'a column comment'," +
+          " c4 bigint," +
+          " c5 float," +
+          " c6 double," +
+          " c7 decimal," +
+          " c8 decimal(12)," +
+          " c9 decimal(8,2)," +
+          " c10 timestamp," +
+          " c11 date," +
+          " c12 string," +
+          " c13 varchar(120)," +
+          " c14 char(10)," +
+          " c15 boolean," +
+          " c16 binary," +
+          " c17 array<string>," +
+          " c18 map <string, string>," +
+          " c19 struct<s1:int, s2:bigint>," +
+          " c20 uniontype<int, string>) " +
+          "comment 'table comment'" +
+          "clustered by (c1) sorted by (c2) into 10 buckets " +
+          "stored as orc " +
+          "tblproperties ('a' = 'b')");
+
+      // Not testing SKEWED BY, ROW FORMAT, or STORED BY (storage handlers)
+
+      stmt.execute("create temporary table " + table3 + " like " + table2);
+
+      stmt.execute("insert into " + table1 + " values (3, 'abc'), (4, 'def')");
+
+      stmt.execute("create table " + table4 + " as select a, b from " + table1);
+
+      stmt.execute("truncate table " + table4);
+
+      stmt.execute("alter table " + table4 + " rename to " + table5);
+      stmt.execute("alter table " + table2 + " set tblproperties ('c' = 'd')");
+
+      // Not testing alter of clustered or sorted by, because that's suicidal
+      // Not testing alter of skewed or serde properties since we didn't test it for create
+      // above.
+
+      stmt.execute("drop table " + table1 + " purge");
+      stmt.execute("drop table " + table2);
+      stmt.execute("drop table " + table3);
+      stmt.execute("drop table " + table5);
+    }
+  }
+
+  @Test
+  public void partitionedTable() throws SQLException {
+    final String table1 = "odpi_sql_ptable1";
+    try (Statement stmt = conn.createStatement()) {
+      stmt.execute("drop table if exists " + table1);
+
+      stmt.execute("create table " + table1 +
+          "(c1 int," +
+          " c2 varchar(32))" +
+          "partitioned by (p1 string comment 'a partition column')" +
+          "stored as orc");
+
+      stmt.execute("alter table " + table1 + " add partition (p1 = 'a')");
+      stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (1, 'abc')");
+      stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (2, 'def')");
+      stmt.execute("insert into " + table1 + " partition (p1 = 'a') values (3, 'ghi')");
+      stmt.execute("alter table " + table1 + " partition (p1 = 'a') concatenate");
+      stmt.execute("alter table " + table1 + " touch partition (p1 = 'a')");
+
+      stmt.execute("alter table " + table1 + " add columns (c3 float)");
+      stmt.execute("alter table " + table1 + " drop partition (p1 = 'a')");
+
+      // Not testing rename partition, exchange partition, msck repair, archive/unarchive,
+      // set location, enable/disable no_drop/offline, compact (because not everyone may have
+      // ACID on), change column
+
+      stmt.execute("drop table " + table1);
+
+    }
+  }
+
+  @Test
+  public void view() throws SQLException {
+    final String table1 = "odpi_sql_vtable1";
+    final String view1 = "odpi_sql_view1";
+    final String view2 = "odpi_sql_view2";
+    try (Statement stmt = conn.createStatement()) {
+      stmt.execute("drop table if exists " + table1);
+      stmt.execute("drop view if exists " + view1);
+      stmt.execute("drop view if exists " + view2);
+      stmt.execute("create table " + table1 + "(a int, b varchar(32))");
+      stmt.execute("create view " + view1 + " as select a from " + table1);
+
+      stmt.execute("create view if not exists " + view2 +
+          " comment 'a view comment' " +
+          "tblproperties ('a' = 'b') " +
+          "as select b from " + table1);
+
+      stmt.execute("alter view " + view1 + " as select a, b from " + table1);
+      stmt.execute("alter view " + view2 + " set tblproperties('c' = 'd')");
+
+      stmt.execute("drop view " + view1);
+      stmt.execute("drop view " + view2);
+    }
+  }
+
+  // Not testing indices because they are currently useless in Hive
+  // Not testing macros because as far as I know no one uses them
+
+  @Test
+  public void function() throws SQLException {
+    final String func1 = "odpi_sql_func1";
+    final String func2 = "odpi_sql_func2";
+    try (Statement stmt = conn.createStatement()) {
+      stmt.execute("create temporary function " + func1 +
+          " as 'org.apache.hadoop.hive.ql.udf.UDFToInteger'");
+      stmt.execute("drop temporary function " + func1);
+
+      stmt.execute("drop function if exists " + func2);
+
+      stmt.execute("create function " + func2 +
+          " as 'org.apache.hadoop.hive.ql.udf.UDFToInteger'");
+      stmt.execute("drop function " + func2);
+    }
+  }
+
+  // Not testing grant/revoke/roles as different vendors use different security solutions
+  // and hence different things will work here.
+
+  // This covers insert (non-partitioned, partitioned, dynamic partitions, overwrite, with
+  // values and select), and multi-insert.  Load is not tested as there's no guarantee that the
+  // test machine has access to HDFS and thus the ability to upload a file (a sketch of such a load follows this file).
+  @Test
+  public void insert() throws SQLException {
+    final String table1 = "odpi_insert_table1";
+    final String table2 = "odpi_insert_table2";
+    try (Statement stmt = conn.createStatement()) {
+      stmt.execute("drop table if exists " + table1);
+      stmt.execute("create table " + table1 +
+          "(c1 tinyint," +
+          " c2 smallint," +
+          " c3 int," +
+          " c4 bigint," +
+          " c5 float," +
+          " c6 double," +
+          " c7 decimal(8,2)," +
+          " c8 varchar(120)," +
+          " c9 char(10)," +
+          " c10 boolean)" +
+          " partitioned by (p1 string)");
+
+      // insert with partition
+      stmt.execute("explain insert into " + table1 + " partition (p1 = 'a') values " +
+          "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true)," +
+          "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true)");
+
+      stmt.execute("set hive.exec.dynamic.partition.mode=nonstrict");
+
+      // dynamic partition
+      stmt.execute("explain insert into " + table1 + " partition (p1) values " +
+          "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true, 'b')," +
+          "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true, 'b')");
+
+      stmt.execute("drop table if exists " + table2);
+
+      stmt.execute("create table " + table2 +
+          "(c1 tinyint," +
+          " c2 smallint," +
+          " c3 int," +
+          " c4 bigint," +
+          " c5 float," +
+          " c6 double," +
+          " c7 decimal(8,2)," +
+          " c8 varchar(120)," +
+          " c9 char(10)," +
+          " c10 boolean)");
+
+      stmt.execute("explain insert into " + table2 + " values " +
+          "(1, 2, 3, 4, 1.1, 2.2, 3.3, 'abcdef', 'ghi', true)," +
+          "(5, 6, 7, 8, 9.9, 8.8, 7.7, 'jklmno', 'pqr', true)");
+
+      stmt.execute("explain insert overwrite table " + table2 + " select c1, c2, c3, c4, c5, c6, " +
+          "c7, c8, c9, c10 from " + table1);
+
+      // multi-insert
+      stmt.execute("from " + table1 +
+          " insert into table " + table1 + " partition (p1 = 'c') " +
+          " select c1, c2, c3, c4, c5, c6, c7, c8, c9, c10" +
+          " insert into table " + table2 + " select c1, c2, c3, c4, c5, c6, c7, c8, c9, c10");
+    }
+  }
+
+  // This tests CTEs
+  @Test
+  public void cte() throws SQLException {
+    final String table1 = "odpi_cte_table1";
+    try (Statement stmt = conn.createStatement()) {
+      stmt.execute("drop table if exists " + table1);
+      stmt.execute("create table " + table1 + "(c1 int, c2 varchar(32))");
+      stmt.execute("with cte1 as (select c1 from " + table1 + " where c1 < 10) " +
+          " select c1 from cte1");
+    }
+  }
+
+  // This tests select, including CTEs, all/distinct, single tables, joins (inner & outer),
+  // group by (w/ and w/o having), order by, cluster by/distribute by/sort by, limit, union,
+  // subqueries, and over.
+
+  @Test
+  public void select() throws SQLException {
+    final String[] tables = {"odpi_select_table1", "odpi_select_table2"};
+    try (Statement stmt = conn.createStatement()) {
+      for (int i = 0; i < tables.length; i++) {
+        stmt.execute("drop table if exists " + tables[i]);
+        stmt.execute("create table " + tables[i] + "(c1 int, c2 varchar(32))");
+      }
+
+      // single table queries tested above in several places
+
+      stmt.execute("explain select all a.c2, SUM(a.c1), SUM(b.c1) " +
+          "from " + tables[0] + " a join " + tables[1] + " b on (a.c2 = b.c2) " +
+          "group by a.c2 " +
+          "order by a.c2 asc " +
+          "limit 10");
+
+      stmt.execute("explain select distinct a.c2 " +
+          "from " + tables[0] + " a left outer join " + tables[1] + " b on (a.c2 = b.c2) " +
+          "order by a.c2 desc ");
+
+      stmt.execute("explain select a.c2, SUM(a.c1) " +
+          "from " + tables[0] + " a right outer join " + tables[1] + " b on (a.c2 = b.c2) " +
+          "group by a.c2 " +
+          "having SUM(b.c1) > 0 " +
+          "order by a.c2 ");
+
+      stmt.execute("explain select a.c2, rank() over (partition by a.c1) " +
+          "from " + tables[0] + " a full outer join " + tables[1] + " b on (a.c2 = b.c2) ");
+
+      stmt.execute("explain select c2 from " + tables[0] + " union all select c2 from " + tables[1]);
+
+      stmt.execute("explain select * from " + tables[0] + " distribute by c1 sort by c2");
+      stmt.execute("explain select * from " + tables[0] + " cluster by c1");
+
+      stmt.execute("explain select * from (select c1 from " + tables[0] + ") t");
+      stmt.execute("explain select * from " + tables[0] + " where c1 in (select c1 from " + tables[1] +
+          ")");
+
+    }
+
+  }
+
+  // Update and delete are not tested because not everyone configures their system to run
+  // with ACID.
+
+
+}
+
+
+
+
+
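The insert() test above notes that LOAD DATA is skipped because HDFS access isn't guaranteed on the test machine. For reference, such a load would look roughly like the sketch below. This is a hypothetical illustration only: the URL, file path, and table are placeholders, and when run through HiveServer2 the LOCAL path is resolved on the server host, not the client.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class HiveLoadSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder URL and credentials, as in the JDBC sketch earlier.
    try (Connection conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default", "hive", "");
         Statement stmt = conn.createStatement()) {
      stmt.execute("create table if not exists odpi_load_sketch (c1 int, c2 varchar(32)) " +
          "row format delimited fields terminated by ','");
      // The input path is a placeholder; 'local' here means local to the HiveServer2 host.
      stmt.execute("load data local inpath '/tmp/odpi_load_sketch.csv' " +
          "overwrite into table odpi_load_sketch");
    }
  }
}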

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
new file mode 100644
index 0000000..8e0abda
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/java/org/odpi/specs/runtime/hive/TestThrift.java
@@ -0,0 +1,251 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.odpi.specs.runtime.hive;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.thrift.TException;
+import org.junit.Assert;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Random;
+
+public class TestThrift {
+
+  private static final Log LOG = LogFactory.getLog(TestThrift.class.getName());
+
+  private static IMetaStoreClient client = null;
+  private static HiveConf conf;
+
+  private Random rand;
+
+  @BeforeClass
+  public static void connect() throws MetaException {
+    if (JdbcConnector.testActive(JdbcConnector.TEST_THRIFT, "Test Thrift ")) {
+      String url = JdbcConnector.getProperty(JdbcConnector.METASTORE_URL, "Thrift metastore URL");
+      conf = new HiveConf();
+      conf.setVar(HiveConf.ConfVars.METASTOREURIS, url);
+      LOG.info("Set to test against metastore at " + url);
+      client = new HiveMetaStoreClient(conf);
+    }
+  }
+
+  @Before
+  public void checkIfActive() {
+    Assume.assumeTrue(JdbcConnector.testActive(JdbcConnector.TEST_THRIFT, "Test Thrift "));
+    rand = new Random();
+  }
+
+  @Test
+  public void db() throws TException {
+    final String dbName = "odpi_thrift_db_" + rand.nextInt(Integer.MAX_VALUE);
+
+    Database db = new Database(dbName, "a db", null, new HashMap<String, String>());
+    client.createDatabase(db);
+    db = client.getDatabase(dbName);
+    Assert.assertNotNull(db);
+    db = new Database(db);
+    db.getParameters().put("a", "b");
+    client.alterDatabase(dbName, db);
+    List<String> alldbs = client.getDatabases("odpi_*");
+    Assert.assertNotNull(alldbs);
+    Assert.assertTrue(alldbs.size() > 0);
+    alldbs = client.getAllDatabases();
+    Assert.assertNotNull(alldbs);
+    Assert.assertTrue(alldbs.size() > 0);
+    client.dropDatabase(dbName, true, true);
+  }
+
+  // Not testing types calls, as they aren't used AFAIK
+
+  @Test
+  public void nonPartitionedTable() throws TException {
+    final String tableName = "odpi_thrift_table_" + rand.nextInt(Integer.MAX_VALUE);
+
+    // I don't test every operation related to tables, but only those that are frequently used.
+    SerDeInfo serde = new SerDeInfo("default_serde",
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
+    FieldSchema fs = new FieldSchema("a", "int", "no comment");
+    StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), null,
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT),
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), false, 0, serde, null, null,
+        new HashMap<String, String>());
+    Table table = new Table(tableName, "default", "me", 0, 0, 0, sd, null,
+        new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+    client.createTable(table);
+
+    table = client.getTable("default", tableName);
+    Assert.assertNotNull(table);
+
+    List<Table> tables =
+        client.getTableObjectsByName("default", Collections.singletonList(tableName));
+    Assert.assertNotNull(tables);
+    Assert.assertEquals(1, tables.size());
+
+    List<String> tableNames = client.getTables("default", "odpi_*");
+    Assert.assertNotNull(tableNames);
+    Assert.assertTrue(tableNames.size() >= 1);
+
+    tableNames = client.getAllTables("default");
+    Assert.assertNotNull(tableNames);
+    Assert.assertTrue(tableNames.size() >= 1);
+
+    List<FieldSchema> cols = client.getFields("default", tableName);
+    Assert.assertNotNull(cols);
+    Assert.assertEquals(1, cols.size());
+
+    cols = client.getSchema("default", tableName);
+    Assert.assertNotNull(cols);
+    Assert.assertEquals(1, cols.size());
+
+    table = new Table(table);
+    table.getParameters().put("a", "b");
+    client.alter_table("default", tableName, table, false);
+
+    table.getParameters().put("c", "d");
+    client.alter_table("default", tableName, table);
+
+    client.dropTable("default", tableName, true, false);
+  }
+
+  @Test
+  public void partitionedTable() throws TException {
+    final String tableName = "odpi_thrift_partitioned_table_" + rand.nextInt(Integer.MAX_VALUE);
+
+    // I don't test every operation related to tables, but only those that are frequently used.
+    SerDeInfo serde = new SerDeInfo("default_serde",
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), new HashMap<String, String>());
+    FieldSchema fs = new FieldSchema("a", "int", "no comment");
+    StorageDescriptor sd = new StorageDescriptor(Collections.singletonList(fs), null,
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT),
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT), false, 0, serde, null, null,
+        new HashMap<String, String>());
+    FieldSchema pk = new FieldSchema("pk", "string", "");
+    Table table = new Table(tableName, "default", "me", 0, 0, 0, sd, Collections.singletonList(pk),
+        new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+    client.createTable(table);
+
+    sd = new StorageDescriptor(Collections.singletonList(fs), null,
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
+        new HashMap<String, String>());
+    Partition partition = new Partition(Collections.singletonList("x"), "default", tableName, 0,
+        0, sd, new HashMap<String, String>());
+    client.add_partition(partition);
+
+    List<Partition> partitions = new ArrayList<>(2);
+    sd = new StorageDescriptor(Collections.singletonList(fs), null,
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
+        new HashMap<String, String>());
+    partitions.add(new Partition(Collections.singletonList("y"), "default", tableName, 0,
+        0, sd, new HashMap<String, String>()));
+    sd = new StorageDescriptor(Collections.singletonList(fs), null,
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE),
+        conf.getVar(HiveConf.ConfVars.HIVEDEFAULTSERDE), false, 0, serde, null, null,
+        new HashMap<String, String>());
+    partitions.add(new Partition(Collections.singletonList("z"), "default", tableName, 0,
+        0, sd, new HashMap<String, String>()));
+    client.add_partitions(partitions);
+
+    List<Partition> parts = client.listPartitions("default", tableName, (short)-1);
+    Assert.assertNotNull(parts);
+    Assert.assertEquals(3, parts.size());
+
+    parts = client.listPartitions("default", tableName, Collections.singletonList("x"),
+        (short)-1);
+    Assert.assertNotNull(parts);
+    Assert.assertEquals(1, parts.size());
+
+    parts = client.listPartitionsWithAuthInfo("default", tableName, (short)-1, "me",
+        Collections.<String>emptyList());
+    Assert.assertNotNull(parts);
+    Assert.assertEquals(3, parts.size());
+
+    List<String> partNames = client.listPartitionNames("default", tableName, (short)-1);
+    Assert.assertNotNull(partNames);
+    Assert.assertEquals(3, partNames.size());
+
+    parts = client.listPartitionsByFilter("default", tableName, "pk = \"x\"", (short)-1);
+    Assert.assertNotNull(parts);
+    Assert.assertEquals(1, parts.size());
+
+    parts = client.getPartitionsByNames("default", tableName, Collections.singletonList("pk=x"));
+    Assert.assertNotNull(parts);
+    Assert.assertEquals(1, parts.size());
+
+    partition = client.getPartition("default", tableName, Collections.singletonList("x"));
+    Assert.assertNotNull(partition);
+
+    partition = client.getPartition("default", tableName, "pk=x");
+    Assert.assertNotNull(partition);
+
+    partition = client.getPartitionWithAuthInfo("default", tableName, Collections.singletonList("x"),
+        "me", Collections.<String>emptyList());
+    Assert.assertNotNull(partition);
+
+    partition = new Partition(partition);
+    partition.getParameters().put("a", "b");
+    client.alter_partition("default", tableName, partition);
+
+    for (Partition p : parts) p.getParameters().put("c", "d");
+    client.alter_partitions("default", tableName, parts);
+
+    // Not testing get_partitions_by_expr because I don't want to hard-code some byte sequence
+    // from the parser.  The odds that anyone other than the Hive parser would call this method seem
+    // low, since you'd have to exactly match the serialization of the Hive parser.
+
+    // Not testing partition marking events, not used by anyone but Hive replication AFAIK
+
+    client.dropPartition("default", tableName, "pk=x", true);
+    client.dropPartition("default", tableName, Collections.singletonList("y"), true);
+  }
+
+  // Not testing index calls, as no one uses indices
+
+
+  // Not sure if anyone uses stats calls or not.  Other query engines might.  Ignoring for now.
+
+  // Not sure if anyone else uses functions, though I'm guessing not, as without Hive classes they
+  // won't be runnable.
+
+  // Not testing authorization calls as AFAIK no one else uses Hive security
+
+  // Not testing transaction/locking calls, as those are used only by Hive.
+
+  // Not testing notification logging calls, as those are used only by Hive replication.
+
+}
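For completeness, the lines below strip the metastore handshake used by the Thrift tests down to a standalone sketch: point METASTOREURIS at a metastore and list what it knows about. The thrift://localhost:9083 URI is a placeholder; the tests read theirs from the property behind JdbcConnector.METASTORE_URL.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;

public class MetastoreSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder metastore URI; substitute the real one for your cluster.
    HiveConf conf = new HiveConf();
    conf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:9083");
    IMetaStoreClient client = new HiveMetaStoreClient(conf);
    for (String db : client.getAllDatabases()) {
      System.out.println("database: " + db);
    }
    client.close();
  }
}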

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/python/find-public-apis.py
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/python/find-public-apis.py b/bigtop-tests/smoke-tests/odpi-runtime/src/test/python/find-public-apis.py
new file mode 100755
index 0000000..091c496
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/python/find-public-apis.py
@@ -0,0 +1,80 @@
+#!/usr/bin/python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import os
+import re
+import warnings
+from optparse import OptionParser
+
+def main():
+  parser = OptionParser()
+  parser.add_option("-d", "--directory", help="Top level directory of source tree")
+  parser.add_option("-r", "--report", help="API compatibility report file, in HTML format")
+
+  (options, args) = parser.parse_args()
+
+  # Make sure the required options were given.
+  if options.directory == None:
+    print "You must specify a top level directory of the source tree"
+    return 1
+
+  if options.report == None:
+    print "You must specify the report to check against"
+    return 1
+
+  publicClasses = set()
+  for directory in os.walk(options.directory):
+    for afile in directory[2]:
+      if re.search("\.java$", afile) != None:
+        handle = open(os.path.join(directory[0], afile))
+        # Figure out the package we're in
+        pre = re.search("org/apache/hadoop[\w/]*", directory[0])
+        if pre == None:
+          warnings.warn("No package for " + directory[0])
+          continue
+        package = pre.group(0)
+        expecting = 0
+        for line in handle:
+          if re.search("@InterfaceAudience.Public", line) != None:
+            expecting = 1
+          classname = re.search("class (\w*)", line)
+          if classname != None and expecting == 1:
+            publicClasses.add(package + "/" + classname.group(1))
+            expecting = 0
+        handle.close()
+
+  handle = open(options.report)
+  haveChecked = set()
+  for line in handle:
+    classre = re.search("mangled: <b>(org/apache/hadoop[\w/]+)", line)
+    if classre != None:
+      classname = classre.group(1)
+      if classname not in haveChecked:
+        if classname in publicClasses:
+          print "Warning, found change in public class " + classname
+        haveChecked.add(classname)
+  handle.close()
+  
+
+
+
+main()
+
+      
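Given the two options defined at the top of the script, a typical invocation (both paths are placeholders) is:

  ./find-public-apis.py -d /path/to/hadoop-source -r /path/to/api-compat-report.html

It prints a warning line for every @InterfaceAudience.Public class it finds mentioned in the report.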


[36/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

Posted by rv...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-2.7.3-api-report.json
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-2.7.3-api-report.json b/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-2.7.3-api-report.json
deleted file mode 100644
index 6a6c7af..0000000
--- a/bigtop-tests/spec-tests/runtime/src/test/resources/hadoop-common-2.7.3-api-report.json
+++ /dev/null
@@ -1 +0,0 @@
-{"name":"hadoop-common","version":"2.7.3","classes":{"org.apache.hadoop.record.RecordInput":{"name":"org.apache.hadoop.record.RecordInput","methods":{"long readLong(java.lang.String) throws java.io.IOException":{"name":"readLong","returnType":"long","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.lang.String) throws java.io.IOException":{"name":"endMap","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Buffer readBuffer(java.lang.String) throws java.io.IOException":{"name":"readBuffer","returnType":"org.apache.hadoop.record.Buffer","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"byte readByte(java.lang.String) throws java.io.IOException":{"name":"readByte","returnType":"byte","args":["java.lang.String"]
 ,"exceptions":["java.io.IOException"]},"float readFloat(java.lang.String) throws java.io.IOException":{"name":"readFloat","returnType":"float","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void startRecord(java.lang.String) throws java.io.IOException":{"name":"startRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"double readDouble(java.lang.String) throws java.io.IOException":{"name":"readDouble","returnType":"double","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.String readString(java.lang.String) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"int readInt(java.lang.String) throws java.io.IOException":{"name":"readInt","re
 turnType":"int","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"boolean readBool(java.lang.String) throws java.io.IOException":{"name":"readBool","returnType":"boolean","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startVector(java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startMap(java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.NullWritable":{"name":"org.apache.hadoop.io.NullWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.NullWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.NullWr
 itable"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"org.apache.hadoop.io.NullWritable get()":{"name":"get","returnType":"org.apache.hadoop.io.NullWritable","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.record.XmlRecordInput":{"name":"org.apache.hadoop.record.XmlRecordInput","methods":{"long readLong(java.lang.String) throws java.io.IOEx
 ception":{"name":"readLong","returnType":"long","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endMap(java.lang.String) throws java.io.IOException":{"name":"endMap","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Buffer readBuffer(java.lang.String) throws java.io.IOException":{"name":"readBuffer","returnType":"org.apache.hadoop.record.Buffer","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endVector(java.lang.String) throws java.io.IOException":{"name":"endVector","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"byte readByte(java.lang.String) throws java.io.IOException":{"name":"readByte","returnType":"byte","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"float readFloat(java.lang.String) throws java.io.IOException":{"name":"readFloat","returnType":"float","args":["java.lang.String"],"exceptions":["java.io.IOExcept
 ion"]},"void startRecord(java.lang.String) throws java.io.IOException":{"name":"startRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"double readDouble(java.lang.String) throws java.io.IOException":{"name":"readDouble","returnType":"double","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void endRecord(java.lang.String) throws java.io.IOException":{"name":"endRecord","returnType":"void","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.String readString(java.lang.String) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"int readInt(java.lang.String) throws java.io.IOException":{"name":"readInt","returnType":"int","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"boolean readBool(java.lang.String) throws java.io.IOException":{"name":"readBool","returnType":"boolean","args":["java.
 lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startVector(java.lang.String) throws java.io.IOException":{"name":"startVector","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.record.Index startMap(java.lang.String) throws java.io.IOException":{"name":"startMap","returnType":"org.apache.hadoop.record.Index","args":["java.lang.String"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.FileSystem":{"name":"org.apache.hadoop.fs.FileSystem","methods":{"org.apache.hadoop.security.token.Token getDelegationToken(java.lang.String) throws java.io.IOException":{"name":"getDelegationToken","returnType":"org.apache.hadoop.security.token.Token","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem get(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"get","returnType":"org.apache.hadoop.fs.FileSyst
 em","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"short getDefaultReplication(org.apache.hadoop.fs.Path)":{"name":"getDefaultReplication","returnType":"short","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"[B getXAttr(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"getXAttr","returnType":"[B","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"boolean setReplication(org.apache.hadoop.fs.Path, short) throws java.io.IOException":{"name":"setReplication","returnType":"boolean","args":["org.apache.hadoop.fs.Path","short"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.ContentSummary getContentSummary(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getContentSummary","returnType":"org.apache.hadoop.fs.ContentSummary","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream crea
 te(org.apache.hadoop.fs.Path, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; globStatus(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.PathFilter) throws java.io.IOException":{"name":"globStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.PathFilter"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileSystem; getChildFileSystems()":{"name":"getChildFileSystems","returnType":"[Lorg.apache.hadoop.fs.FileSystem;","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.ut
 il.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"java.lang.Class getFileSystemClass(java.lang.String, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"getFileSystemClass","returnType":"java.lang.Class","args":["java.lang.String","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem newInstance(java.net.URI, org.apache
 .hadoop.conf.Configuration, java.lang.String) throws java.lang.InterruptedException, java.io.IOException":{"name":"newInstance","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.net.URI","org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"java.util.Map getStatistics()":{"name":"getStatistics","returnType":"java.util.Map","args":[],"exceptions":[]},"org.apache.hadoop.fs.RemoteIterator listStatusIterator(org.apache.hadoop.fs.Path) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listStatusIterator","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FileSystem$Statistics getStatistics(java.lang.String, java.lang.Class)":{"name":"getStatistics","returnType":"org.apache.hadoop.fs.FileSystem$Statistics","args":["java.lang.String","java.lang.Class"],"excep
 tions":[]},"boolean isFile(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"isFile","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void renameSnapshot(org.apache.hadoop.fs.Path, java.lang.String, java.lang.String) throws java.io.IOException":{"name":"renameSnapshot","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsStatus getStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getStatus","return
 Type":"org.apache.hadoop.fs.FsStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path createSnapshot(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"createSnapshot","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listFiles(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listFiles","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"void copyToLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IO
 Exception":{"name":"copyToLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean areSymlinksEnabled()":{"name":"areSymlinksEnabled","returnType":"boolean","args":[],"exceptions":[]},"boolean createNewFile(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"createNewFile","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void removeAcl(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"removeAcl","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) th
 rows java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.BlockLocation; getFileBlockLocations(org.apache.hadoop.fs.Path, long, long) throws java.io.IOException":{"name":"getFileBlockLocations","returnType":"[Lorg.apache.hadoop.fs.BlockLocation;","args":["org.apache.hadoop.fs.Path","long","long"],"exceptions":["java.io.IOException"]},"boolean deleteOnExit(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"deleteOnExit","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream",
 "args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.LocalFileSystem getLocal(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"getLocal","returnType":"org.apache.hadoop.fs.LocalFileSystem","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void setDefaultUri(org.apache.hadoop.conf.Configuration, java.lang.String)":{"name":"setDefaultUri","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":[]},"org.apache.hadoop.fs.permission.AclStatus getAclStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getAclStatus","returnType":"org.apache.hadoop.fs.permission.AclStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean mkdirs(org.apache.hadoop.fs.FileSystem, org.apache.hadoo
 p.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"mkdirs","returnType":"boolean","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"void printStatistics() throws java.io.IOException":{"name":"printStatistics","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"void setOwner(org.apache.hadoop.fs.Path, java.lang.String, java.lang.String) throws java.io.IOException":{"name":"setOwner","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsServerDefaults getServerDefaults(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getServerDefaults","returnType":"org.apache.hadoop.fs.FsServerDefaults","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void moveFromLocalFile(org.apache.hadoop.fs
 .Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveFromLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"void deleteSnapshot(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"deleteSnapshot","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"long getDefaultBlockSize()":{"name":"getDefaultBlockSize","returnType":"long","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataInputSt
 ream open(org.apache.hadoop.fs.Path, int) throws java.io.IOException":{"name":"open","returnType":"org.apache.hadoop.fs.FSDataInputStream","args":["org.apache.hadoop.fs.Path","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsStatus getStatus() throws java.io.IOException":{"name":"getStatus","returnType":"org.apache.hadoop.fs.FsStatus","args":[],"exceptions":["java.io.IOException"]},"java.net.URI getUri()":{"name":"getUri","returnType":"java.net.URI","args":[],"exceptions":[]},"[Lorg.apache.hadoop.security.token.Token; addDelegationTokens(java.lang.String, org.apache.hadoop.security.Credentials) throws java.io.IOException":{"name":"addDelegationTokens","returnType":"[Lorg.apache.hadoop.security.token.Token;","args":["java.lang.String","org.apache.hadoop.security.Credentials"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getHomeDirectory()":{"name":"getHomeDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void comple
 teLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"completeLocalOutput","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean rename(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"rename","returnType":"boolean","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileStatus getFileStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void enableSymlinks()":{"name":"enableSymlinks","returnType":"void","args":[],"exceptions":[]},"void moveToLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveToLocalFile","returnType":"void","args":["org.
 apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listCorruptFileBlocks(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listCorruptFileBlocks","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setAcl(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"setAcl","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path createSnapshot(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"createSnapshot","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileChecksum getFileChecksum(org.apache.hadoop.fs.Path, long) throws java.io.IOException":{"name":"getFileChecksum","returnType":"org.apache.hadoop.fs.FileChecksu
 m","args":["org.apache.hadoop.fs.Path","long"],"exceptions":["java.io.IOException"]},"void setPermission(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission) throws java.io.IOException":{"name":"setPermission","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path startLocalOutput(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"startLocalOutput","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setWriteChecksum(boolean)":{"name":"setWriteChecksum","returnType":"void","args":["boolean"],"exceptions":[]},"java.lang.String getScheme()":{"name":"getScheme","returnType":"java.lang.String","args":[],"exceptions":[]},"boolean delete(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"delete","returnType":"
 boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"boolean isDirectory(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"isDirectory","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void copyToLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"copyToLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void copyFromLocalFile(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"short getDefaultReplication()":{"name":"getDefaultReplication","returnType":"short","args":[],"exceptions":[]},"void setXAttr(org.apache.hadoop.fs.Path, java.lang.Strin
 g, [B) throws java.io.IOException":{"name":"setXAttr","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","[B"],"exceptions":["java.io.IOException"]},"boolean cancelDeleteOnExit(org.apache.hadoop.fs.Path)":{"name":"cancelDeleteOnExit","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void copyToLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyToLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.BlockLocation; getFileBlockLocations(org.apache.hadoop.fs.FileStatus, long, long) throws java.io.IOException":{"name":"getFileBlockLocations","returnType":"[Lorg.apache.hadoop.fs.BlockLocation;","args":["org.apache.hadoop.fs.FileStatus","long","long"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus(org.apache.hadoop.fs.Path, or
 g.apache.hadoop.fs.PathFilter) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.PathFilter"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"void removeDefaultAcl(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"removeDefaultAcl","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem getNamed(java.lang.String, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"getNamed","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.lang.String","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus([Lorg.apache.hadoop.fs.Path;, org.apache.hadoop.fs.PathFilter) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listStatus","returnT
 ype":"[Lorg.apache.hadoop.fs.FileStatus;","args":["[Lorg.apache.hadoop.fs.Path;","org.apache.hadoop.fs.PathFilter"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"long getDefaultBlockSize(org.apache.hadoop.fs.Path)":{"name":"getDefaultBlockSize","returnType":"long","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void concat(org.apache.hadoop.fs.Path, [Lorg.apache.hadoop.fs.Path;) throws java.io.IOException":{"name":"concat","returnType":"void","args":["org.apache.hadoop.fs.Path","[Lorg.apache.hadoop.fs.Path;"],"exceptions":["java.io.IOException"]},"void initialize(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"initialize","returnType":"void","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean, int) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDa
 taOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, short, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","short","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path makeQualified(org.apache.hadoop.fs.Path)":{"name":"makeQualified","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, short) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","short"],"exceptions":["java.io.IOException"]},"void setTimes(org.apache.hadoop.fs.Path, long, long) throws java.io.IOException":{"name":"setTim
 es","returnType":"void","args":["org.apache.hadoop.fs.Path","long","long"],"exceptions":["java.io.IOException"]},"long getUsed() throws java.io.IOException":{"name":"getUsed","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"void moveFromLocalFile([Lorg.apache.hadoop.fs.Path;, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveFromLocalFile","returnType":"void","args":["[Lorg.apache.hadoop.fs.Path;","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.net.URI getDefaultUri(org.apache.hadoop.conf.Configuration)":{"name":"getDefaultUri","returnType":"java.net.URI","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean, int, short, long) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","short","long"],"exceptions":["java.io.IOException"]}
 ,"java.util.Map getXAttrs(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"getXAttrs","returnType":"java.util.Map","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"void setVerifyChecksum(boolean)":{"name":"setVerifyChecksum","returnType":"void","args":["boolean"],"exceptions":[]},"org.apache.hadoop.fs.FileStatus getFileLinkStatus(org.apache.hadoop.fs.Path) throws org.apache.hadoop.security.AccessControlException, org.apache.hadoop.fs.UnsupportedFileSystemException, java.io.IOException, java.io.FileNotFoundException":{"name":"getFileLinkStatus","returnType":"org.apache.hadoop.fs.FileStatus","args":["org.apache.hadoop.fs.Path"],"exceptions":["org.apache.hadoop.security.AccessControlException","org.apache.hadoop.fs.UnsupportedFileSystemException","java.io.IOException","java.io.FileNotFoundException"]},"void copyFromLocalFile(boolean, boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.I
 OException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","boolean","org.apache.hadoop.fs.Path","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; globStatus(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"globStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream createNonRecursive(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createNonRecursive","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getLinkTarget(org.a
 pache.hadoop.fs.Path) throws java.io.IOException":{"name":"getLinkTarget","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem get(java.net.URI, org.apache.hadoop.conf.Configuration, java.lang.String) throws java.lang.InterruptedException, java.io.IOException":{"name":"get","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.net.URI","org.apache.hadoop.conf.Configuration","java.lang.String"],"exceptions":["java.lang.InterruptedException","java.io.IOException"]},"void createSymlink(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path, boolean) throws org.apache.hadoop.fs.FileAlreadyExistsException, org.apache.hadoop.security.AccessControlException, org.apache.hadoop.fs.ParentNotDirectoryException, org.apache.hadoop.fs.UnsupportedFileSystemException, java.io.IOException, java.io.FileNotFoundException":{"name":"createSymlink","returnType":"void","args":["org.apache.hadoop.fs.Path","
 org.apache.hadoop.fs.Path","boolean"],"exceptions":["org.apache.hadoop.fs.FileAlreadyExistsException","org.apache.hadoop.security.AccessControlException","org.apache.hadoop.fs.ParentNotDirectoryException","org.apache.hadoop.fs.UnsupportedFileSystemException","java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path, int) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","int"],"exceptions":["java.io.IOException"]},"boolean delete(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"delete","returnType":"boolean","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void copyFromLocalFile(boolean, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","org.apache.hadoop.fs.Path","org.ap
 ache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.util.List getAllStatistics()":{"name":"getAllStatistics","returnType":"java.util.List","args":[],"exceptions":[]},"void access(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsAction) throws org.apache.hadoop.security.AccessControlException, java.io.IOException, java.io.FileNotFoundException":{"name":"access","returnType":"void","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsAction"],"exceptions":["org.apache.hadoop.security.AccessControlException","java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean"],"exceptions":["java.io.IOException"]},"void removeAclEntries(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"removeAclEntri
 es","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void closeAllForUGI(org.apache.hadoop.security.UserGroupInformation) throws java.io.IOException":{"name":"closeAllForUGI","returnType":"void","args":["org.apache.hadoop.security.UserGroupInformation"],"exceptions":["java.io.IOException"]},"void setDefaultUri(org.apache.hadoop.conf.Configuration, java.net.URI)":{"name":"setDefaultUri","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.net.URI"],"exceptions":[]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, boolean, int, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apach
 e.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","boolean","int","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FSDataOutputStream append(org.apache.hadoop.fs.Path, int, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"append","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","int","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"void clearStatistics()":{"name":"clearStatistics","returnType":"void","args":[],"exceptions":[]},"org.apache.hadoop.fs.FileChecksum getFileChecksum(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getFileChecksum","returnType":"org.apache.hadoop.fs.FileChecksum","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void removeXAttr(org.apache.hadoop.fs.Path, java.lang.String) throws java.io.IOException":{"name":"removeXAttr","returnType":"void","args":["org.
 apache.hadoop.fs.Path","java.lang.String"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FileSystem newInstance(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"newInstance","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path getWorkingDirectory()":{"name":"getWorkingDirectory","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"org.apache.hadoop.fs.FileSystem get(java.net.URI, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"get","returnType":"org.apache.hadoop.fs.FileSystem","args":["java.net.URI","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"java.util.List listXAttrs(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"listXAttrs","returnType":"java.util.List","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.I
 OException"]},"org.apache.hadoop.fs.FileSystem newInstance(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"newInstance","returnType":"org.apache.hadoop.fs.FileSystem","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void setWorkingDirectory(org.apache.hadoop.fs.Path)":{"name":"setWorkingDirectory","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"void copyFromLocalFile(boolean, boolean, [Lorg.apache.hadoop.fs.Path;, org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"copyFromLocalFile","returnType":"void","args":["boolean","boolean","[Lorg.apache.hadoop.fs.Path;","org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.Path resolvePath(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"resolvePath","returnType":"org.apache.hadoop.fs.Path","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.util.Map getXAttrs(o
 rg.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getXAttrs","returnType":"java.util.Map","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"long getLength(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getLength","returnType":"long","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"java.lang.String getCanonicalServiceName()":{"name":"getCanonicalServiceName","returnType":"java.lang.String","args":[],"exceptions":[]},"long getBlockSize(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getBlockSize","returnType":"long","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"short getReplication(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"getReplication","returnType":"short","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void modifyAclEntries(org.apache.hadoop.fs.Path, java.util.List) throws java.io.IOException":{"name":"mo
 difyAclEntries","returnType":"void","args":["org.apache.hadoop.fs.Path","java.util.List"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.RemoteIterator listLocatedStatus(org.apache.hadoop.fs.Path) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listLocatedStatus","returnType":"org.apache.hadoop.fs.RemoteIterator","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FSDataInputStream open(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"open","returnType":"org.apache.hadoop.fs.FSDataInputStream","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus(org.apache.hadoop.fs.Path) throws java.io.IOException, java.io.FileNotFoundException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException","java.io.FileNotFound
 Exception"]},"org.apache.hadoop.fs.LocalFileSystem newInstanceLocal(org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"newInstanceLocal","returnType":"org.apache.hadoop.fs.LocalFileSystem","args":["org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"boolean exists(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"exists","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void setXAttr(org.apache.hadoop.fs.Path, java.lang.String, [B, java.util.EnumSet) throws java.io.IOException":{"name":"setXAttr","returnType":"void","args":["org.apache.hadoop.fs.Path","java.lang.String","[B","java.util.EnumSet"],"exceptions":["java.io.IOException"]},"boolean supportsSymlinks()":{"name":"supportsSymlinks","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.fs.FSDataOut
 putStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, java.util.EnumSet, int, short, long, org.apache.hadoop.util.Progressable, org.apache.hadoop.fs.Options$ChecksumOpt) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","java.util.EnumSet","int","short","long","org.apache.hadoop.util.Progressable","org.apache.hadoop.fs.Options$ChecksumOpt"],"exceptions":["java.io.IOException"]},"boolean truncate(org.apache.hadoop.fs.Path, long) throws java.io.IOException":{"name":"truncate","returnType":"boolean","args":["org.apache.hadoop.fs.Path","long"],"exceptions":["java.io.IOException"]},"void closeAll() throws java.io.IOException":{"name":"closeAll","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.fs.FileStatus; listStatus([Lorg.apache.hadoop.fs.Path;) throws java.io.IOException, java.io
 .FileNotFoundException":{"name":"listStatus","returnType":"[Lorg.apache.hadoop.fs.FileStatus;","args":["[Lorg.apache.hadoop.fs.Path;"],"exceptions":["java.io.IOException","java.io.FileNotFoundException"]},"org.apache.hadoop.fs.FSDataOutputStream create(org.apache.hadoop.fs.Path, org.apache.hadoop.fs.permission.FsPermission, boolean, int, short, long, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"create","returnType":"org.apache.hadoop.fs.FSDataOutputStream","args":["org.apache.hadoop.fs.Path","org.apache.hadoop.fs.permission.FsPermission","boolean","int","short","long","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.fs.FsServerDefaults getServerDefaults() throws java.io.IOException":{"name":"getServerDefaults","returnType":"org.apache.hadoop.fs.FsServerDefaults","args":[],"exceptions":["java.io.IOException"]},"void close() throws java.io.IOException":{"name":"close","returnType":"void","args":[],"exceptions"
 :["java.io.IOException"]}}},"org.apache.hadoop.fs.BlockLocation":{"name":"org.apache.hadoop.fs.BlockLocation","methods":{"[Ljava.lang.String; getCachedHosts()":{"name":"getCachedHosts","returnType":"[Ljava.lang.String;","args":[],"exceptions":[]},"void setTopologyPaths([Ljava.lang.String;) throws java.io.IOException":{"name":"setTopologyPaths","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void setHosts([Ljava.lang.String;) throws java.io.IOException":{"name":"setHosts","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void setCorrupt(boolean)":{"name":"setCorrupt","returnType":"void","args":["boolean"],"exceptions":[]},"[Ljava.lang.String; getNames() throws java.io.IOException":{"name":"getNames","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; getTopologyPaths() throws java.io.IOException":{"name":"getTopologyPaths","returnType":"[Ljava.lang.Str
 ing;","args":[],"exceptions":["java.io.IOException"]},"long getLength()":{"name":"getLength","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"long getOffset()":{"name":"getOffset","returnType":"long","args":[],"exceptions":[]},"void setOffset(long)":{"name":"setOffset","returnType":"void","args":["long"],"exceptions":[]},"void setNames([Ljava.lang.String;) throws java.io.IOException":{"name":"setNames","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.io.IOException"]},"void setLength(long)":{"name":"setLength","returnType":"void","args":["long"],"exceptions":[]},"[Ljava.lang.String; getHosts() throws java.io.IOException":{"name":"getHosts","returnType":"[Ljava.lang.String;","args":[],"exceptions":["java.io.IOException"]},"boolean isCorrupt()":{"name":"isCorrupt","returnType":"boolean","args":[],"exceptions":[]},"void setCachedHosts([Ljava.lang.String;)":{"
 name":"setCachedHosts","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":[]}}},"org.apache.hadoop.io.Text":{"name":"org.apache.hadoop.io.Text","methods":{"java.lang.String readString(java.io.DataInput, int) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.io.DataInput","int"],"exceptions":["java.io.IOException"]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void set(org.apache.hadoop.io.Text)":{"name":"set","returnType":"void","args":["org.apache.hadoop.io.Text"],"exceptions":[]},"void validateUTF8([B, int, int) throws java.nio.charset.MalformedInputException":{"name":"validateUTF8","returnType":"void","args":["[B","int","int"],"exceptions":["java.nio.charset.MalformedInputException"]},"int getLength()":{"name":"getLength","returnType":"int","args":[],"exceptions":[]},"void readFields(java.io.DataInput, int) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["j
 ava.io.DataInput","int"],"exceptions":["java.io.IOException"]},"void set([B, int, int)":{"name":"set","returnType":"void","args":["[B","int","int"],"exceptions":[]},"int bytesToCodePoint(java.nio.ByteBuffer)":{"name":"bytesToCodePoint","returnType":"int","args":["java.nio.ByteBuffer"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void append([B, int, int)":{"name":"append","returnType":"void","args":["[B","int","int"],"exceptions":[]},"java.lang.String decode([B, int, int, boolean) throws java.nio.charset.CharacterCodingException":{"name":"decode","returnType":"java.lang.String","args":["[B","int","int","boolean"],"exceptions":["java.nio.charset.CharacterCodingException"]},"java.nio.ByteBuffer encode(java.lang.String, boolean) throws java.nio.charset.CharacterCodingException":{"name":"encode","returnType":"java.nio.ByteBuffer","args":["java.lang.String","boolean"],"exceptions":["java.nio.char
 set.CharacterCodingException"]},"int writeString(java.io.DataOutput, java.lang.String, int) throws java.io.IOException":{"name":"writeString","returnType":"int","args":["java.io.DataOutput","java.lang.String","int"],"exceptions":["java.io.IOException"]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.lang.String decode([B, int, int) throws java.nio.charset.CharacterCodingException":{"name":"decode","returnType":"java.lang.String","args":["[B","int","int"],"exceptions":["java.nio.charset.CharacterCodingException"]},"java.nio.ByteBuffer encode(java.lang.String) throws java.nio.charset.CharacterCodingException":{"name":"encode","returnType":"java.nio.ByteBuffer","args":["java.lang.String"],"exceptions":["java.nio.charset.CharacterCodingException"]},"int writeS
 tring(java.io.DataOutput, java.lang.String) throws java.io.IOException":{"name":"writeString","returnType":"int","args":["java.io.DataOutput","java.lang.String"],"exceptions":["java.io.IOException"]},"[B getBytes()":{"name":"getBytes","returnType":"[B","args":[],"exceptions":[]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]},"void write(java.io.DataOutput, int) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput","int"],"exceptions":["java.io.IOException"]},"void set(java.lang.String)":{"name":"set","returnType":"void","args":["java.lang.String"],"exceptions":[]},"int utf8Length(java.lang.String)":{"name":"utf8Length","returnType":"int","args":["java.lang.String"],"exceptions":[]},"void readWithKnownLength(java.io.DataInput, int) throws java.io.IOException":{"name":"readWithKnownLength","returnType":"void","args":["java.io.DataInput","int"],"exceptions":["java.io.IOException"]},"java.lang.String readString(java.i
 o.DataInput) throws java.io.IOException":{"name":"readString","returnType":"java.lang.String","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"java.lang.String decode([B) throws java.nio.charset.CharacterCodingException":{"name":"decode","returnType":"java.lang.String","args":["[B"],"exceptions":["java.nio.charset.CharacterCodingException"]},"void skip(java.io.DataInput) throws java.io.IOException":{"name":"skip","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"int charAt(int)":{"name":"charAt","returnType":"int","args":["int"],"exceptions":[]},"int find(java.lang.String, int)":{"name":"find","returnType":"int","args":["java.lang.String","int"],"exceptions":[]},"void set([B)":{"name":"set","returnType":"void","args":["[B"],"exceptions":[]},"int find(java.lang.String)":{"name":"find","returnType":"int","args":["java.lang.String"],"exceptions":[]},"[B copyBytes()":{"name":"copyBytes","returnType":"[B","args":[],"exceptions":[]}
 ,"void validateUTF8([B) throws java.nio.charset.MalformedInputException":{"name":"validateUTF8","returnType":"void","args":["[B"],"exceptions":["java.nio.charset.MalformedInputException"]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.Writable":{"name":"org.apache.hadoop.io.Writable","methods":{"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.VLongWritable":{"name":"org.apache.hadoop.io.VLongWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"void set(long)":{"name":"set","return
 Type":"void","args":["long"],"exceptions":[]},"long get()":{"name":"get","returnType":"long","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.VLongWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.VLongWritable"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.VersionedWritable
 ":{"name":"org.apache.hadoop.io.VersionedWritable","methods":{"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"byte getVersion()":{"name":"getVersion","returnType":"byte","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.SequenceFile":{"name":"org.apache.hadoop.io.SequenceFile","methods":{"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configu
 ration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileContext, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.io.SequenceFile$Metadata, java.util.EnumSet, [Lorg.apache.hadoop.fs.Options$CreateOpts;) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileContext","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.io.SequenceFile$Metadata","java.util.EnumSet","[Lorg.apache.hadoop.
 fs.Options$CreateOpts;"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.conf.Configuration, [Lorg.apache.hadoop.io.SequenceFile$Writer$Option;) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.conf.Configuration","[Lorg.apache.hadoop.io.SequenceFile$Writer$Option;"],"exceptions":["java.io.IOException"]},"void setDefaultCompressionType(org.apache.hadoop.conf.Configuration, org.apache.hadoop.io.SequenceFile$CompressionType)":{"name":"setDefaultCompressionType","returnType":"void","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.io.SequenceFile$CompressionType"],"exceptions":[]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, int, short, long, org.apache.hadoop.io.SequenceFile$Compressi
 onType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.util.Progressable, org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","int","short","long","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.util.Progressable","org.apache.hadoop.io.SequenceFile$Metadata"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FSDataOutputStream, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException":{"name":"createWriter","r
 eturnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.FSDataOutputStream","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.io.SequenceFile$Metadata"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FSDataOutputStream, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.FSDataOutputStream","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec"],"exceptions":["java.io.IOExcepti
 on"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, int, short, long, boolean, org.apache.hadoop.io.Seq
 uenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec, org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","int","short","long","boolean","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.io.SequenceFile$Metadata"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.l
 ang.Class","java.lang.Class"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.util.Progressable) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.util.Progressable"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.Compr
 essionCodec, org.apache.hadoop.util.Progressable, org.apache.hadoop.io.SequenceFile$Metadata) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec","org.apache.hadoop.util.Progressable","org.apache.hadoop.io.SequenceFile$Metadata"],"exceptions":["java.io.IOException"]},"org.apache.hadoop.io.SequenceFile$CompressionType getDefaultCompressionType(org.apache.hadoop.conf.Configuration)":{"name":"getDefaultCompressionType","returnType":"org.apache.hadoop.io.SequenceFile$CompressionType","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.SequenceFile$Writer createWriter(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs
 .Path, java.lang.Class, java.lang.Class, org.apache.hadoop.io.SequenceFile$CompressionType, org.apache.hadoop.io.compress.CompressionCodec) throws java.io.IOException":{"name":"createWriter","returnType":"org.apache.hadoop.io.SequenceFile$Writer","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.conf.Configuration","org.apache.hadoop.fs.Path","java.lang.Class","java.lang.Class","org.apache.hadoop.io.SequenceFile$CompressionType","org.apache.hadoop.io.compress.CompressionCodec"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.file.tfile.MetaBlockAlreadyExists":{"name":"org.apache.hadoop.io.file.tfile.MetaBlockAlreadyExists","methods":{}},"org.apache.hadoop.fs.FileStatus":{"name":"org.apache.hadoop.fs.FileStatus","methods":{"org.apache.hadoop.fs.permission.FsPermission getPermission()":{"name":"getPermission","returnType":"org.apache.hadoop.fs.permission.FsPermission","args":[],"exceptions":[]},"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"
 exceptions":[]},"boolean isFile()":{"name":"isFile","returnType":"boolean","args":[],"exceptions":[]},"long getBlockSize()":{"name":"getBlockSize","returnType":"long","args":[],"exceptions":[]},"java.lang.String getOwner()":{"name":"getOwner","returnType":"java.lang.String","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getPath()":{"name":"getPath","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":[]},"void setSymlink(org.apache.hadoop.fs.Path)":{"name":"setSymlink","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"long getAccessTime()":{"name":"getAccessTime","returnType":"long","args":[],"exceptions":[]},"boolean isDir()":{"name":"isDir","returnType":"boolean","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"boolean isEncr
 ypted()":{"name":"isEncrypted","returnType":"boolean","args":[],"exceptions":[]},"long getLen()":{"name":"getLen","returnType":"long","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void setPath(org.apache.hadoop.fs.Path)":{"name":"setPath","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"org.apache.hadoop.fs.Path getSymlink() throws java.io.IOException":{"name":"getSymlink","returnType":"org.apache.hadoop.fs.Path","args":[],"exceptions":["java.io.IOException"]},"short getReplication()":{"name":"getReplication","returnType":"short","args":[],"exceptions":[]},"boolean isDirectory()":{"name":"isDirectory","returnType":"boolean","args":[],"exceptions":[]},"java.lang.String getGroup()":{"name":"getGroup","returnTy
 pe":"java.lang.String","args":[],"exceptions":[]},"boolean isSymlink()":{"name":"isSymlink","returnType":"boolean","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]},"long getModificationTime()":{"name":"getModificationTime","returnType":"long","args":[],"exceptions":[]}}},"org.apache.hadoop.util.PureJavaCrc32":{"name":"org.apache.hadoop.util.PureJavaCrc32","methods":{"void update([B, int, int)":{"name":"update","returnType":"void","args":["[B","int","int"],"exceptions":[]},"long getValue()":{"name":"getValue","returnType":"long","args":[],"exceptions":[]},"void reset()":{"name":"reset","returnType":"void","args":[],"exceptions":[]},"void update(int)":{"name":"update","returnType":"void","args":["int"],"exceptions":[]}}},"org.apache.hadoop.fs.Trash":{"name":"org.apache.hadoop.fs.Trash","methods":{"java.lang.Runnable getEmptier() throws j
 ava.io.IOException":{"name":"getEmptier","returnType":"java.lang.Runnable","args":[],"exceptions":["java.io.IOException"]},"boolean moveToTrash(org.apache.hadoop.fs.Path) throws java.io.IOException":{"name":"moveToTrash","returnType":"boolean","args":["org.apache.hadoop.fs.Path"],"exceptions":["java.io.IOException"]},"void expunge() throws java.io.IOException":{"name":"expunge","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"boolean moveToAppropriateTrash(org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, org.apache.hadoop.conf.Configuration) throws java.io.IOException":{"name":"moveToAppropriateTrash","returnType":"boolean","args":["org.apache.hadoop.fs.FileSystem","org.apache.hadoop.fs.Path","org.apache.hadoop.conf.Configuration"],"exceptions":["java.io.IOException"]},"void checkpoint() throws java.io.IOException":{"name":"checkpoint","returnType":"void","args":[],"exceptions":["java.io.IOException"]},"boolean isEnabled()":{"name":"isEnabled","returnT
 ype":"boolean","args":[],"exceptions":[]}}},"org.apache.hadoop.record.RecordComparator":{"name":"org.apache.hadoop.record.RecordComparator","methods":{"int compare([B, int, int, [B, int, int)":{"name":"compare","returnType":"int","args":["[B","int","int","[B","int","int"],"exceptions":[]},"void define(java.lang.Class, org.apache.hadoop.record.RecordComparator)":{"name":"define","returnType":"void","args":["java.lang.Class","org.apache.hadoop.record.RecordComparator"],"exceptions":[]}}},"org.apache.hadoop.record.meta.RecordTypeInfo":{"name":"org.apache.hadoop.record.meta.RecordTypeInfo","methods":{"void setName(java.lang.String)":{"name":"setName","returnType":"void","args":["java.lang.String"],"exceptions":[]},"java.lang.String getName()":{"name":"getName","returnType":"java.lang.String","args":[],"exceptions":[]},"java.util.Collection getFieldTypeInfos()":{"name":"getFieldTypeInfos","returnType":"java.util.Collection","args":[],"exceptions":[]},"void serialize(org.apache.hadoop.rec
 ord.RecordOutput, java.lang.String) throws java.io.IOException":{"name":"serialize","returnType":"void","args":["org.apache.hadoop.record.RecordOutput","java.lang.String"],"exceptions":["java.io.IOException"]},"void deserialize(org.apache.hadoop.record.RecordInput, java.lang.String) throws java.io.IOException":{"name":"deserialize","returnType":"void","args":["org.apache.hadoop.record.RecordInput","java.lang.String"],"exceptions":["java.io.IOException"]},"void addField(java.lang.String, org.apache.hadoop.record.meta.TypeID)":{"name":"addField","returnType":"void","args":["java.lang.String","org.apache.hadoop.record.meta.TypeID"],"exceptions":[]},"int compareTo(java.lang.Object) throws java.lang.ClassCastException":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":["java.lang.ClassCastException"]},"org.apache.hadoop.record.meta.RecordTypeInfo getNestedStructTypeInfo(java.lang.String)":{"name":"getNestedStructTypeInfo","returnType":"org.apache.hadoop.reco
 rd.meta.RecordTypeInfo","args":["java.lang.String"],"exceptions":[]}}},"org.apache.hadoop.conf.Configuration":{"name":"org.apache.hadoop.conf.Configuration","methods":{"void addResource(org.apache.hadoop.fs.Path)":{"name":"addResource","returnType":"void","args":["org.apache.hadoop.fs.Path"],"exceptions":[]},"java.util.Set getFinalParameters()":{"name":"getFinalParameters","returnType":"java.util.Set","args":[],"exceptions":[]},"java.lang.String getTrimmed(java.lang.String, java.lang.String)":{"name":"getTrimmed","returnType":"java.lang.String","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void setPattern(java.lang.String, java.util.regex.Pattern)":{"name":"setPattern","returnType":"void","args":["java.lang.String","java.util.regex.Pattern"],"exceptions":[]},"int size()":{"name":"size","returnType":"int","args":[],"exceptions":[]},"void addResource(org.apache.hadoop.conf.Configuration)":{"name":"addResource","returnType":"void","args":["org.apache.hadoop.conf.Conf
 iguration"],"exceptions":[]},"java.util.List getInstances(java.lang.String, java.lang.Class)":{"name":"getInstances","returnType":"java.util.List","args":["java.lang.String","java.lang.Class"],"exceptions":[]},"void addResource(java.net.URL)":{"name":"addResource","returnType":"void","args":["java.net.URL"],"exceptions":[]},"void setFloat(java.lang.String, float)":{"name":"setFloat","returnType":"void","args":["java.lang.String","float"],"exceptions":[]},"void set(java.lang.String, java.lang.String, java.lang.String)":{"name":"set","returnType":"void","args":["java.lang.String","java.lang.String","java.lang.String"],"exceptions":[]},"void setBooleanIfUnset(java.lang.String, boolean)":{"name":"setBooleanIfUnset","returnType":"void","args":["java.lang.String","boolean"],"exceptions":[]},"void reloadConfiguration()":{"name":"reloadConfiguration","returnType":"void","args":[],"exceptions":[]},"java.util.regex.Pattern getPattern(java.lang.String, java.util.regex.Pattern)":{"name":"getPat
 tern","returnType":"java.util.regex.Pattern","args":["java.lang.String","java.util.regex.Pattern"],"exceptions":[]},"java.net.InetSocketAddress updateConnectAddr(java.lang.String, java.net.InetSocketAddress)":{"name":"updateConnectAddr","returnType":"java.net.InetSocketAddress","args":["java.lang.String","java.net.InetSocketAddress"],"exceptions":[]},"java.lang.String get(java.lang.String, java.lang.String)":{"name":"get","returnType":"java.lang.String","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void setDeprecatedProperties()":{"name":"setDeprecatedProperties","returnType":"void","args":[],"exceptions":[]},"boolean onlyKeyExists(java.lang.String)":{"name":"onlyKeyExists","returnType":"boolean","args":["java.lang.String"],"exceptions":[]},"java.util.Iterator iterator()":{"name":"iterator","returnType":"java.util.Iterator","args":[],"exceptions":[]},"org.apache.hadoop.fs.Path getLocalPath(java.lang.String, java.lang.String) throws java.io.IOException":{"name":"ge
 tLocalPath","returnType":"org.apache.hadoop.fs.Path","args":["java.lang.String","java.lang.String"],"exceptions":["java.io.IOException"]},"java.lang.Class getClassByName(java.lang.String) throws java.lang.ClassNotFoundException":{"name":"getClassByName","returnType":"java.lang.Class","args":["java.lang.String"],"exceptions":["java.lang.ClassNotFoundException"]},"java.io.InputStream getConfResourceAsInputStream(java.lang.String)":{"name":"getConfResourceAsInputStream","returnType":"java.io.InputStream","args":["java.lang.String"],"exceptions":[]},"[Ljava.lang.String; getTrimmedStrings(java.lang.String)":{"name":"getTrimmedStrings","returnType":"[Ljava.lang.String;","args":["java.lang.String"],"exceptions":[]},"void writeXml(java.io.Writer) throws java.io.IOException":{"name":"writeXml","returnType":"void","args":["java.io.Writer"],"exceptions":["java.io.IOException"]},"void clear()":{"name":"clear","returnType":"void","args":[],"exceptions":[]},"java.net.URL getResource(java.lang.Str
 ing)":{"name":"getResource","returnType":"java.net.URL","args":["java.lang.String"],"exceptions":[]},"java.net.InetSocketAddress updateConnectAddr(java.lang.String, java.lang.String, java.lang.String, java.net.InetSocketAddress)":{"name":"updateConnectAddr","returnType":"java.net.InetSocketAddress","args":["java.lang.String","java.lang.String","java.lang.String","java.net.InetSocketAddress"],"exceptions":[]},"boolean getBoolean(java.lang.String, boolean)":{"name":"getBoolean","returnType":"boolean","args":["java.lang.String","boolean"],"exceptions":[]},"void main([Ljava.lang.String;) throws java.lang.Exception":{"name":"main","returnType":"void","args":["[Ljava.lang.String;"],"exceptions":["java.lang.Exception"]},"java.lang.Enum getEnum(java.lang.String, java.lang.Enum)":{"name":"getEnum","returnType":"java.lang.Enum","args":["java.lang.String","java.lang.Enum"],"exceptions":[]},"void set(java.lang.String, java.lang.String)":{"name":"set","returnType":"void","args":["java.lang.Strin
 g","java.lang.String"],"exceptions":[]},"void setEnum(java.lang.String, java.lang.Enum)":{"name":"setEnum","returnType":"void","args":["java.lang.String","java.lang.Enum"],"exceptions":[]},"void addDeprecation(java.lang.String, java.lang.String)":{"name":"addDeprecation","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":[]},"[Ljava.lang.Class; getClasses(java.lang.String, [Ljava.lang.Class;)":{"name":"getClasses","returnType":"[Ljava.lang.Class;","args":["java.lang.String","[Ljava.lang.Class;"],"exceptions":[]},"float getFloat(java.lang.String, float)":{"name":"getFloat","returnType":"float","args":["java.lang.String","float"],"exceptions":[]},"long getLongBytes(java.lang.String, long)":{"name":"getLongBytes","returnType":"long","args":["java.lang.String","long"],"exceptions":[]},"java.lang.Class getClassByNameOrNull(java.lang.String)":{"name":"getClassByNameOrNull","returnType":"java.lang.Class","args":["java.lang.String"],"exceptions":[]},"void setStr
 ings(java.lang.String, [Ljava.lang.String;)":{"name":"setStrings","returnType":"void","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"void addDeprecations([Lorg.apache.hadoop.conf.Configuration$DeprecationDelta;)":{"name":"addDeprecations","returnType":"void","args":["[Lorg.apache.hadoop.conf.Configuration$DeprecationDelta;"],"exceptions":[]},"[Ljava.lang.String; getPropertySources(java.lang.String)":{"name":"getPropertySources","returnType":"[Ljava.lang.String;","args":["java.lang.String"],"exceptions":[]},"org.apache.hadoop.conf.Configuration$IntegerRanges getRange(java.lang.String, java.lang.String)":{"name":"getRange","returnType":"org.apache.hadoop.conf.Configuration$IntegerRanges","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void setLong(java.lang.String, long)":{"name":"setLong","returnType":"void","args":["java.lang.String","long"],"exceptions":[]},"void setQuietMode(boolean)":{"name":"setQuietMode","returnType":"void","args":["boolean
 "],"exceptions":[]},"void setClassLoader(java.lang.ClassLoader)":{"name":"setClassLoader","returnType":"void","args":["java.lang.ClassLoader"],"exceptions":[]},"[C getPassword(java.lang.String) throws java.io.IOException":{"name":"getPassword","returnType":"[C","args":["java.lang.String"],"exceptions":["java.io.IOException"]},"void setTimeDuration(java.lang.String, long, java.util.concurrent.TimeUnit)":{"name":"setTimeDuration","returnType":"void","args":["java.lang.String","long","java.util.concurrent.TimeUnit"],"exceptions":[]},"void setDouble(java.lang.String, double)":{"name":"setDouble","returnType":"void","args":["java.lang.String","double"],"exceptions":[]},"void addDeprecation(java.lang.String, [Ljava.lang.String;, java.lang.String)":{"name":"addDeprecation","returnType":"void","args":["java.lang.String","[Ljava.lang.String;","java.lang.String"],"exceptions":[]},"java.lang.String get(java.lang.String)":{"name":"get","returnType":"java.lang.String","args":["java.lang.String"]
 ,"exceptions":[]},"java.lang.Class getClass(java.lang.String, java.lang.Class)":{"name":"getClass","returnType":"java.lang.Class","args":["java.lang.String","java.lang.Class"],"exceptions":[]},"void setClass(java.lang.String, java.lang.Class, java.lang.Class)":{"name":"setClass","returnType":"void","args":["java.lang.String","java.lang.Class","java.lang.Class"],"exceptions":[]},"java.util.Collection getStringCollection(java.lang.String)":{"name":"getStringCollection","returnType":"java.util.Collection","args":["java.lang.String"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"java.io.File getFile(java.lang.String, java.lang.String) throws java.io.IOException":{"name":"getFile","returnType":"java.io.File","args":["java.lang.String","java.lang.St
 ring"],"exceptions":["java.io.IOException"]},"double getDouble(java.lang.String, double)":{"name":"getDouble","returnType":"double","args":["java.lang.String","double"],"exceptions":[]},"void setBoolean(java.lang.String, boolean)":{"name":"setBoolean","returnType":"void","args":["java.lang.String","boolean"],"exceptions":[]},"boolean isDeprecated(java.lang.String)":{"name":"isDeprecated","returnType":"boolean","args":["java.lang.String"],"exceptions":[]},"java.lang.String getTrimmed(java.lang.String)":{"name":"getTrimmed","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":[]},"void setInt(java.lang.String, int)":{"name":"setInt","returnType":"void","args":["java.lang.String","int"],"exceptions":[]},"void addDeprecation(java.lang.String, java.lang.String, java.lang.String)":{"name":"addDeprecation","returnType":"void","args":["java.lang.String","java.lang.String","java.lang.String"],"exceptions":[]},"long getLong(java.lang.String, long)":{"name":"getLong","retur
 nType":"long","args":["java.lang.String","long"],"exceptions":[]},"void addDeprecation(java.lang.String, [Ljava.lang.String;)":{"name":"addDeprecation","returnType":"void","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"void setAllowNullValueProperties(boolean)":{"name":"setAllowNullValueProperties","returnType":"void","args":["boolean"],"exceptions":[]},"java.util.Collection getTrimmedStringCollection(java.lang.String)":{"name":"getTrimmedStringCollection","returnType":"java.util.Collection","args":["java.lang.String"],"exceptions":[]},"[Ljava.lang.String; getStrings(java.lang.String, [Ljava.lang.String;)":{"name":"getStrings","returnType":"[Ljava.lang.String;","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"java.io.Reader getConfResourceAsReader(java.lang.String)":{"name":"getConfResourceAsReader","returnType":"java.io.Reader","args":["java.lang.String"],"exceptions":[]},"long getTimeDuration(java.lang.String, long, java.util.concurrent.Time
 Unit)":{"name":"getTimeDuration","returnType":"long","args":["java.lang.String","long","java.util.concurrent.TimeUnit"],"exceptions":[]},"void addResource(java.io.InputStream)":{"name":"addResource","returnType":"void","args":["java.io.InputStream"],"exceptions":[]},"java.net.InetSocketAddress getSocketAddr(java.lang.String, java.lang.String, java.lang.String, int)":{"name":"getSocketAddr","returnType":"java.net.InetSocketAddress","args":["java.lang.String","java.lang.String","java.lang.String","int"],"exceptions":[]},"void dumpDeprecatedKeys()":{"name":"dumpDeprecatedKeys","returnType":"void","args":[],"exceptions":[]},"[I getInts(java.lang.String)":{"name":"getInts","returnType":"[I","args":["java.lang.String"],"exceptions":[]},"void addResource(java.lang.String)":{"name":"addResource","returnType":"void","args":["java.lang.String"],"exceptions":[]},"[Ljava.lang.String; getTrimmedStrings(java.lang.String, [Ljava.lang.String;)":{"name":"getTrimmedStrings","returnType":"[Ljava.lang.
 String;","args":["java.lang.String","[Ljava.lang.String;"],"exceptions":[]},"java.lang.Class getClass(java.lang.String, java.lang.Class, java.lang.Class)":{"name":"getClass","returnType":"java.lang.Class","args":["java.lang.String","java.lang.Class","java.lang.Class"],"exceptions":[]},"void setIfUnset(java.lang.String, java.lang.String)":{"name":"setIfUnset","returnType":"void","args":["java.lang.String","java.lang.String"],"exceptions":[]},"void unset(java.lang.String)":{"name":"unset","returnType":"void","args":["java.lang.String"],"exceptions":[]},"void dumpConfiguration(org.apache.hadoop.conf.Configuration, java.io.Writer) throws java.io.IOException":{"name":"dumpConfiguration","returnType":"void","args":["org.apache.hadoop.conf.Configuration","java.io.Writer"],"exceptions":["java.io.IOException"]},"[Ljava.lang.String; getStrings(java.lang.String)":{"name":"getStrings","returnType":"[Ljava.lang.String;","args":["java.lang.String"],"exceptions":[]},"void addResource(java.io.Input
 Stream, java.lang.String)":{"name":"addResource","returnType":"void","args":["java.io.InputStream","java.lang.String"],"exceptions":[]},"java.util.Map getValByRegex(java.lang.String)":{"name":"getValByRegex","returnType":"java.util.Map","args":["java.lang.String"],"exceptions":[]},"void setSocketAddr(java.lang.String, java.net.InetSocketAddress)":{"name":"setSocketAddr","returnType":"void","args":["java.lang.String","java.net.InetSocketAddress"],"exceptions":[]},"int getInt(java.lang.String, int)":{"name":"getInt","returnType":"int","args":["java.lang.String","int"],"exceptions":[]},"void writeXml(java.io.OutputStream) throws java.io.IOException":{"name":"writeXml","returnType":"void","args":["java.io.OutputStream"],"exceptions":["java.io.IOException"]},"java.lang.ClassLoader getClassLoader()":{"name":"getClassLoader","returnType":"java.lang.ClassLoader","args":[],"exceptions":[]},"void addDefaultResource(java.lang.String)":{"name":"addDefaultResource","returnType":"void","args":["j
 ava.lang.String"],"exceptions":[]},"java.net.InetSocketAddress getSocketAddr(java.lang.String, java.lang.String, int)":{"name":"getSocketAddr","returnType":"java.net.InetSocketAddress","args":["java.lang.String","java.lang.String","int"],"exceptions":[]},"boolean hasWarnedDeprecation(java.lang.String)":{"name":"hasWarnedDeprecation","returnType":"boolean","args":["java.lang.String"],"exceptions":[]},"java.lang.String getRaw(java.lang.String)":{"name":"getRaw","returnType":"java.lang.String","args":["java.lang.String"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.WritableFactories":{"name":"org.apache.hadoop.io.WritableFactories","methods":{"org.apache.hadoop.io.WritableFactory getFactory(java.lang.Class)":{"name":"getFactory","returnType":"org.apache.hadoop.io.WritableFactory","args":["java.lang.Class"],"exceptions":[
 ]},"void setFactory(java.lang.Class, org.apache.hadoop.io.WritableFactory)":{"name":"setFactory","returnType":"void","args":["java.lang.Class","org.apache.hadoop.io.WritableFactory"],"exceptions":[]},"org.apache.hadoop.io.Writable newInstance(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"newInstance","returnType":"org.apache.hadoop.io.Writable","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Writable newInstance(java.lang.Class)":{"name":"newInstance","returnType":"org.apache.hadoop.io.Writable","args":["java.lang.Class"],"exceptions":[]}}},"org.apache.hadoop.io.SetFile":{"name":"org.apache.hadoop.io.SetFile","methods":{}},"org.apache.hadoop.record.compiler.JString":{"name":"org.apache.hadoop.record.compiler.JString","methods":{}},"org.apache.hadoop.record.compiler.JBoolean":{"name":"org.apache.hadoop.record.compiler.JBoolean","methods":{}},"org.apache.hadoop.io.ShortWritable":{"name":"org.apache.hadoop.io.S
 hortWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"int compareTo(org.apache.hadoop.io.ShortWritable)":{"name":"compareTo","returnType":"int","args":["org.apache.hadoop.io.ShortWritable"],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"short get()":{"name":"get","returnType":"short","args":[],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void set(short)":{"name":"set","returnType":"void","args":["short"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":
 {"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.InvalidPathException":{"name":"org.apache.hadoop.fs.InvalidPathException","methods":{}},"org.apache.hadoop.record.compiler.JVector":{"name":"org.apache.hadoop.record.compiler.JVector","methods":{}},"org.apache.hadoop.io.ArrayWritable":{"name":"org.apache.hadoop.io.ArrayWritable","methods":{"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"[Lorg.apache.hadoop.io.Writable; get()":{"name":"get","returnType":"[Lorg.apache.hadoop.io.Writable;","args":[],"exceptions":[]},"void set([Lorg.apache.hadoop.io.Writable;)":{"name":"set","returnType":"void","args":["[Lorg.apache.hadoop.io.Writable;"],"exceptions":[]},"[Ljava.lang.String; toStrings()":{"name":"toStrings","returnType":"[Ljava.lang.String;","args":[],"exceptions":[]},"java.lang.Class getValu
 eClass()":{"name":"getValueClass","returnType":"java.lang.Class","args":[],"exceptions":[]},"java.lang.Object toArray()":{"name":"toArray","returnType":"java.lang.Object","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.IntWritable":{"name":"org.apache.hadoop.io.IntWritable","methods":{"int hashCode()":{"name":"hashCode","returnType":"int","args":[],"exceptions":[]},"java.lang.String toString()":{"name":"toString","returnType":"java.lang.String","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void set(int)":{"name":"set","returnType":"void","args":["int"],"exceptions":[]},"int compareTo(org.apache.hadoop.io.IntWritable)":{"name":"compareTo","returnType":"int","args":["or
 g.apache.hadoop.io.IntWritable"],"exceptions":[]},"int get()":{"name":"get","returnType":"int","args":[],"exceptions":[]},"int compareTo(java.lang.Object)":{"name":"compareTo","returnType":"int","args":["java.lang.Object"],"exceptions":[]},"boolean equals(java.lang.Object)":{"name":"equals","returnType":"boolean","args":["java.lang.Object"],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.io.TwoDArrayWritable":{"name":"org.apache.hadoop.io.TwoDArrayWritable","methods":{"[[Lorg.apache.hadoop.io.Writable; get()":{"name":"get","returnType":"[[Lorg.apache.hadoop.io.Writable;","args":[],"exceptions":[]},"void write(java.io.DataOutput) throws java.io.IOException":{"name":"write","returnType":"void","args":["java.io.DataOutput"],"exceptions":["java.io.IOException"]},"void set([[Lorg.apache.hadoop.io.Writable;)":{"name":"set","retur
 nType":"void","args":["[[Lorg.apache.hadoop.io.Writable;"],"exceptions":[]},"java.lang.Object toArray()":{"name":"toArray","returnType":"java.lang.Object","args":[],"exceptions":[]},"void readFields(java.io.DataInput) throws java.io.IOException":{"name":"readFields","returnType":"void","args":["java.io.DataInput"],"exceptions":["java.io.IOException"]}}},"org.apache.hadoop.fs.FSDataInputStream":{"name":"org.apache.hadoop.fs.FSDataInputStream","methods":{"void readFully(long, [B) throws java.io.IOException":{"name":"readFully","returnType":"void","args":["long","[B"],"exceptions":["java.io.IOException"]},"java.nio.ByteBuffer read(org.apache.hadoop.io.ByteBufferPool, int) throws java.lang.UnsupportedOperationException, java.io.IOException":{"name":"read","returnType":"java.nio.ByteBuffer","args":["org.apache.hadoop.io.ByteBufferPool","int"],"exceptions":["java.lang.UnsupportedOperationException","java.io.IOException"]},"void readFully(long, [B, int, int) throws java.io.IOException":{"n
 ame":"readFully","returnType":"void","args":["long","[B","int","int"],"exceptions":["java.io.IOException"]},"void unbuffer()":{"name":"unbuffer","returnType":"void","args":[],"exceptions":[]},"void seek(long) throws java.io.IOException":{"name":"seek","returnType":"void","args":["long"],"exceptions":["java.io.IOException"]},"long getPos() throws java.io.IOException":{"name":"getPos","returnType":"long","args":[],"exceptions":["java.io.IOException"]},"void setReadahead(java.lang.Long) throws java.lang.UnsupportedOperationException, java.io.IOException":{"name":"setReadahead","returnType":"void","args":["java.lang.Long"],"exceptions":["java.lang.UnsupportedOperationException","java.io.IOException"]},"void releaseBuffer(java.nio.ByteBuffer)":{"name":"releaseBuffer","returnType":"void","args":["java.nio.ByteBuffer"],"exceptions":[]},"java.io.InputStream getWrappedStream()":{"name":"getWrappedStream","returnType":"java.io.InputStream","args":[],"exceptions":[]},"java.nio.ByteBuffer read(
 org.apache.hadoop.io.ByteBufferPool, int, java.util.EnumSet) throws java.lang.UnsupportedOperationException, java.io.IOException":{"name":"read","returnType":"java.nio.ByteBuffer

<TRUNCATED>