Posted to commits@bigtop.apache.org by rv...@apache.org on 2017/03/23 17:28:10 UTC

[39/50] [abbrv] bigtop git commit: BIGTOP-2704. Include ODPi runtime tests option into the battery of smoke tests

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-bin.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-bin.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-bin.list
new file mode 100644
index 0000000..d3861b9
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-bin.list
@@ -0,0 +1,3 @@
+mapred
+yarn
+container-executor

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json
new file mode 100644
index 0000000..f62ee8e
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-client-2.7.3-api-report.json
@@ -0,0 +1 @@
+{"name":"hadoop-yarn-client","version":"2.7.3","classes":{"org.apache.hadoop.yarn.client.api.YarnClient":{"name":"org.apache.hadoop.yarn.client.api.YarnClient","methods":{"java.util.List getQueueAclsInfo() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueAclsInfo","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.YarnClient createYarnClient()":{"name":"createYarnClient","returnType":"org.apache.hadoop.yarn.client.api.YarnClient","args":[],"exceptions":[]},"java.util.List getApplications(java.util.Set) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":["java.util.Set"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Map getNodeToLabels() throws org.apache.hadoop.yarn.exceptions.YarnExceptio
 n, java.io.IOException":{"name":"getNodeToLabels","returnType":"java.util.Map","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse updateReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"updateReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getAllQueues() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getAllQueues","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplicationAttempts(org.apach
 e.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttempts","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.YarnClientApplication createApplication() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"createApplication","returnType":"org.apache.hadoop.yarn.client.api.YarnClientApplication","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Set getClusterNodeLabels() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getClusterNodeLabels","returnType":"java.util.Set","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void moveApplicationAcrossQueues(org.apache
 .hadoop.yarn.api.records.ApplicationId, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"moveApplicationAcrossQueues","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport getApplicationAttemptReport(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttemptReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.Token getRMDelegationToken(org.apache.hadoop.io.Text) throws org.apache.hadoop.yarn.exceptions.YarnException, 
 java.io.IOException":{"name":"getRMDelegationToken","returnType":"org.apache.hadoop.yarn.api.records.Token","args":["org.apache.hadoop.io.Text"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse submitReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"submitReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getContainers(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainers","returnType":"java.util.List","args":["org.apac
 he.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationReport getApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationId submitApplication(org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"submitApplication","returnType":"org.apache.hadoop.yarn.api.records.ApplicationId","args":["org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext"],"exceptions":["org.apache.hadoop.
 yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Map getLabelsToNodes() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getLabelsToNodes","returnType":"java.util.Map","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.security.token.Token getAMRMToken(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getAMRMToken","returnType":"org.apache.hadoop.security.token.Token","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications(java.util.EnumSet) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":["java.util.EnumSet"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnExcept
 ion","java.io.IOException"]},"java.util.List getRootQueueInfos() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getRootQueueInfos","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.QueueInfo getQueueInfo(java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getQueueInfo","returnType":"org.apache.hadoop.yarn.api.records.QueueInfo","args":["java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getChildQueueInfos(java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getChildQueueInfos","returnType":"java.util.List","args":["java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.Map getLabelsToNodes(java.util
 .Set) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getLabelsToNodes","returnType":"java.util.Map","args":["java.util.Set"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse deleteReservation(org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"deleteReservation","returnType":"org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse","args":["org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.YarnClusterMetrics getYarnClusterMetrics() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getYarnClusterMetrics","returnType":"org.apache.hadoop.yarn.api.rec
 ords.YarnClusterMetrics","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getNodeReports([Lorg.apache.hadoop.yarn.api.records.NodeState;) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getNodeReports","returnType":"java.util.List","args":["[Lorg.apache.hadoop.yarn.api.records.NodeState;"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void killApplication(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"killApplication","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args
 ":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications(java.util.Set, java.util.EnumSet) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":["java.util.Set","java.util.EnumSet"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ContainerReport getContainerReport(org.apache.hadoop.yarn.api.records.ContainerId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainerReport","returnType":"org.apache.hadoop.yarn.api.records.ContainerReport","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.client.api.async.NMClientAsync":{"name":"org.apache.hadoop.yarn.client.api.async.NMClientAsync","methods":{"v
 oid setClient(org.apache.hadoop.yarn.client.api.NMClient)":{"name":"setClient","returnType":"void","args":["org.apache.hadoop.yarn.client.api.NMClient"],"exceptions":[]},"void setCallbackHandler(org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler)":{"name":"setCallbackHandler","returnType":"void","args":["org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler"],"exceptions":[]},"void getContainerStatusAsync(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId)":{"name":"getContainerStatusAsync","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"void startContainerAsync(org.apache.hadoop.yarn.api.records.Container, org.apache.hadoop.yarn.api.records.ContainerLaunchContext)":{"name":"startContainerAsync","returnType":"void","args":["org.apache.hadoop.yarn.api.records.Container","org.apache.hadoop.yarn.api.records.ContainerLaunch
 Context"],"exceptions":[]},"void stopContainerAsync(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId)":{"name":"stopContainerAsync","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":[]},"org.apache.hadoop.yarn.client.api.NMClient getClient()":{"name":"getClient","returnType":"org.apache.hadoop.yarn.client.api.NMClient","args":[],"exceptions":[]},"org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler getCallbackHandler()":{"name":"getCallbackHandler","returnType":"org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler","args":[],"exceptions":[]},"org.apache.hadoop.yarn.client.api.async.NMClientAsync createNMClientAsync(org.apache.hadoop.yarn.client.api.async.NMClientAsync$CallbackHandler)":{"name":"createNMClientAsync","returnType":"org.apache.hadoop.yarn.client.api.async.NMClientAsync","args":["org.apache.hadoop.yarn.client.api.
 async.NMClientAsync$CallbackHandler"],"exceptions":[]}}},"org.apache.hadoop.yarn.client.api.AMRMClient":{"name":"org.apache.hadoop.yarn.client.api.AMRMClient","methods":{"org.apache.hadoop.yarn.client.api.NMTokenCache getNMTokenCache()":{"name":"getNMTokenCache","returnType":"org.apache.hadoop.yarn.client.api.NMTokenCache","args":[],"exceptions":[]},"void addContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"addContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"void updateBlacklist(java.util.List, java.util.List)":{"name":"updateBlacklist","returnType":"void","args":["java.util.List","java.util.List"],"exceptions":[]},"java.util.List getMatchingRequests(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource)":{"name":"getMatchingRequests","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.Prior
 ity","java.lang.String","org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void waitFor(com.google.common.base.Supplier) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.api.records.Resource getAvailableResources()":{"name":"getAvailableResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void waitFor(com.google.common.base.Supplier, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse registerApplicationMaster(java.lang.String, int, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"registerApplicationMaster","returnType":"org.ap
 ache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","args":["java.lang.String","int","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void unregisterApplicationMaster(org.apache.hadoop.yarn.api.records.FinalApplicationStatus, java.lang.String, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"unregisterApplicationMaster","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus","java.lang.String","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void waitFor(com.google.common.base.Supplier, int, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int","int"],"exceptions":["java.lang.InterruptedException"]},"int getClusterNodeCount()":{"name":"getClusterNodeCount","returnType":"int","args":[],
 "exceptions":[]},"void releaseAssignedContainer(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"releaseAssignedContainer","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"void setNMTokenCache(org.apache.hadoop.yarn.client.api.NMTokenCache)":{"name":"setNMTokenCache","returnType":"void","args":["org.apache.hadoop.yarn.client.api.NMTokenCache"],"exceptions":[]},"void removeContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"removeContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"org.apache.hadoop.yarn.client.api.AMRMClient createAMRMClient()":{"name":"createAMRMClient","returnType":"org.apache.hadoop.yarn.client.api.AMRMClient","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse allocate(float) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":
 "allocate","returnType":"org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse","args":["float"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.client.api.YarnClientApplication":{"name":"org.apache.hadoop.yarn.client.api.YarnClientApplication","methods":{"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse getNewApplicationResponse()":{"name":"getNewApplicationResponse","returnType":"org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext getApplicationSubmissionContext()":{"name":"getApplicationSubmissionContext","returnType":"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext","args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.client.api.AHSClient":{"name":"org.apache.hadoop.yarn.client.api.AHSClient","methods":{"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport g
 etApplicationAttemptReport(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttemptReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationAttemptReport","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplications() throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplications","returnType":"java.util.List","args":[],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getContainers(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainers","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.ApplicationAttemptId"],"exceptions":["org
 .apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ApplicationReport getApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationReport","returnType":"org.apache.hadoop.yarn.api.records.ApplicationReport","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.AHSClient createAHSClient()":{"name":"createAHSClient","returnType":"org.apache.hadoop.yarn.client.api.AHSClient","args":[],"exceptions":[]},"org.apache.hadoop.yarn.api.records.ContainerReport getContainerReport(org.apache.hadoop.yarn.api.records.ContainerId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainerReport","returnType":"org.apache.hadoop.yarn.api.records.ContainerReport","args":["or
 g.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"java.util.List getApplicationAttempts(org.apache.hadoop.yarn.api.records.ApplicationId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getApplicationAttempts","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.ApplicationId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]}}},"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync":{"name":"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync","methods":{"void addContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"addContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"void updateBlacklist(java.util.List, java.util.List)":{"name":"updateBlacklist","returnType":"void","args":["java.uti
 l.List","java.util.List"],"exceptions":[]},"java.util.List getMatchingRequests(org.apache.hadoop.yarn.api.records.Priority, java.lang.String, org.apache.hadoop.yarn.api.records.Resource)":{"name":"getMatchingRequests","returnType":"java.util.List","args":["org.apache.hadoop.yarn.api.records.Priority","java.lang.String","org.apache.hadoop.yarn.api.records.Resource"],"exceptions":[]},"void waitFor(com.google.common.base.Supplier) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync createAMRMClientAsync(int, org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler)":{"name":"createAMRMClientAsync","returnType":"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync","args":["int","org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler"],"exceptions":[]},"org.apache.hadoop.yarn.api.r
 ecords.Resource getAvailableResources()":{"name":"getAvailableResources","returnType":"org.apache.hadoop.yarn.api.records.Resource","args":[],"exceptions":[]},"void waitFor(com.google.common.base.Supplier, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int"],"exceptions":["java.lang.InterruptedException"]},"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync createAMRMClientAsync(org.apache.hadoop.yarn.client.api.AMRMClient, int, org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler)":{"name":"createAMRMClientAsync","returnType":"org.apache.hadoop.yarn.client.api.async.AMRMClientAsync","args":["org.apache.hadoop.yarn.client.api.AMRMClient","int","org.apache.hadoop.yarn.client.api.async.AMRMClientAsync$CallbackHandler"],"exceptions":[]},"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse registerApplicationMaster(java.lang.String, int, java.lang.String) throws
  org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"registerApplicationMaster","returnType":"org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse","args":["java.lang.String","int","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void unregisterApplicationMaster(org.apache.hadoop.yarn.api.records.FinalApplicationStatus, java.lang.String, java.lang.String) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"unregisterApplicationMaster","returnType":"void","args":["org.apache.hadoop.yarn.api.records.FinalApplicationStatus","java.lang.String","java.lang.String"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"void waitFor(com.google.common.base.Supplier, int, int) throws java.lang.InterruptedException":{"name":"waitFor","returnType":"void","args":["com.google.common.base.Supplier","int","int"],"exceptio
 ns":["java.lang.InterruptedException"]},"int getClusterNodeCount()":{"name":"getClusterNodeCount","returnType":"int","args":[],"exceptions":[]},"void releaseAssignedContainer(org.apache.hadoop.yarn.api.records.ContainerId)":{"name":"releaseAssignedContainer","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId"],"exceptions":[]},"void removeContainerRequest(org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest)":{"name":"removeContainerRequest","returnType":"void","args":["org.apache.hadoop.yarn.client.api.AMRMClient$ContainerRequest"],"exceptions":[]},"void setHeartbeatInterval(int)":{"name":"setHeartbeatInterval","returnType":"void","args":["int"],"exceptions":[]}}},"org.apache.hadoop.yarn.client.api.NMClient":{"name":"org.apache.hadoop.yarn.client.api.NMClient","methods":{"void stopContainer(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOExc
 eption":{"name":"stopContainer","returnType":"void","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.api.records.ContainerStatus getContainerStatus(org.apache.hadoop.yarn.api.records.ContainerId, org.apache.hadoop.yarn.api.records.NodeId) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"getContainerStatus","returnType":"org.apache.hadoop.yarn.api.records.ContainerStatus","args":["org.apache.hadoop.yarn.api.records.ContainerId","org.apache.hadoop.yarn.api.records.NodeId"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.NMTokenCache getNMTokenCache()":{"name":"getNMTokenCache","returnType":"org.apache.hadoop.yarn.client.api.NMTokenCache","args":[],"exceptions":[]},"org.apache.hadoop.yarn.client.api.NMClient creat
 eNMClient()":{"name":"createNMClient","returnType":"org.apache.hadoop.yarn.client.api.NMClient","args":[],"exceptions":[]},"java.util.Map startContainer(org.apache.hadoop.yarn.api.records.Container, org.apache.hadoop.yarn.api.records.ContainerLaunchContext) throws org.apache.hadoop.yarn.exceptions.YarnException, java.io.IOException":{"name":"startContainer","returnType":"java.util.Map","args":["org.apache.hadoop.yarn.api.records.Container","org.apache.hadoop.yarn.api.records.ContainerLaunchContext"],"exceptions":["org.apache.hadoop.yarn.exceptions.YarnException","java.io.IOException"]},"org.apache.hadoop.yarn.client.api.NMClient createNMClient(java.lang.String)":{"name":"createNMClient","returnType":"org.apache.hadoop.yarn.client.api.NMClient","args":["java.lang.String"],"exceptions":[]},"void setNMTokenCache(org.apache.hadoop.yarn.client.api.NMTokenCache)":{"name":"setNMTokenCache","returnType":"void","args":["org.apache.hadoop.yarn.client.api.NMTokenCache"],"exceptions":[]},"void 
 cleanupRunningContainersOnStop(boolean)":{"name":"cleanupRunningContainersOnStop","returnType":"void","args":["boolean"],"exceptions":[]}}}}}
\ No newline at end of file
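
The api-report file above is a flat JSON map from public class names to their public method signatures (name, return type, argument types, and thrown exceptions). As a minimal illustration of how such a report could be inspected, the following Groovy sketch parses it with JsonSlurper and prints a per-class method count; the file path and this approach are illustrative and are not taken from the actual api_examination test driver:

    import groovy.json.JsonSlurper

    // Parse the API report shipped as a test resource (path is illustrative).
    def report = new JsonSlurper().parseText(
        new File('hadoop-yarn-client-2.7.3-api-report.json').text)

    println "Module: ${report.name} ${report.version}"
    report.classes.each { className, clazz ->
      // Each class entry maps a full method signature to its parsed parts.
      println "  ${className}: ${clazz.methods.size()} public methods"
    }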

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json
new file mode 100644
index 0000000..b394bff
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-common-2.7.3-api-report.json
@@ -0,0 +1 @@
+{"name":"hadoop-yarn-common","version":"2.7.3","classes":{"org.apache.hadoop.yarn.security.ContainerTokenSelector":{"name":"org.apache.hadoop.yarn.security.ContainerTokenSelector","methods":{"org.apache.hadoop.security.token.Token selectToken(org.apache.hadoop.io.Text, java.util.Collection)":{"name":"selectToken","returnType":"org.apache.hadoop.security.token.Token","args":["org.apache.hadoop.io.Text","java.util.Collection"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo":{"name":"org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getTo
 kenInfo","returnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.SchedulerSecurityInfo":{"name":"org.apache.hadoop.yarn.security.SchedulerSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getTokenInfo","returnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.util.SystemClock":{"name":"org.apache.hadoop.yarn.util.SystemClock","methods":{"long getTime()":{"name":"getTime","returnType":"long",
 "args":[],"exceptions":[]}}},"org.apache.hadoop.yarn.security.client.RMDelegationTokenSelector":{"name":"org.apache.hadoop.yarn.security.client.RMDelegationTokenSelector","methods":{"org.apache.hadoop.security.token.Token selectToken(org.apache.hadoop.io.Text, java.util.Collection)":{"name":"selectToken","returnType":"org.apache.hadoop.security.token.Token","args":["org.apache.hadoop.io.Text","java.util.Collection"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.client.ClientRMSecurityInfo":{"name":"org.apache.hadoop.yarn.security.client.ClientRMSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getTokenInfo","r
 eturnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.security.admin.AdminSecurityInfo":{"name":"org.apache.hadoop.yarn.security.admin.AdminSecurityInfo","methods":{"org.apache.hadoop.security.KerberosInfo getKerberosInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getKerberosInfo","returnType":"org.apache.hadoop.security.KerberosInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.security.token.TokenInfo getTokenInfo(java.lang.Class, org.apache.hadoop.conf.Configuration)":{"name":"getTokenInfo","returnType":"org.apache.hadoop.security.token.TokenInfo","args":["java.lang.Class","org.apache.hadoop.conf.Configuration"],"exceptions":[]}}},"org.apache.hadoop.yarn.client.ClientRMProxy":{"name":"org.apache.hadoop.yarn.client.ClientRMProxy","methods":{"org.apache.hadoop.io.Text getRMDelegationTokenService
 (org.apache.hadoop.conf.Configuration)":{"name":"getRMDelegationTokenService","returnType":"org.apache.hadoop.io.Text","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Text getAMRMTokenService(org.apache.hadoop.conf.Configuration)":{"name":"getAMRMTokenService","returnType":"org.apache.hadoop.io.Text","args":["org.apache.hadoop.conf.Configuration"],"exceptions":[]},"org.apache.hadoop.io.Text getTokenService(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.String, int)":{"name":"getTokenService","returnType":"org.apache.hadoop.io.Text","args":["org.apache.hadoop.conf.Configuration","java.lang.String","java.lang.String","int"],"exceptions":[]},"java.lang.Object createRMProxy(org.apache.hadoop.conf.Configuration, java.lang.Class) throws java.io.IOException":{"name":"createRMProxy","returnType":"java.lang.Object","args":["org.apache.hadoop.conf.Configuration","java.lang.Class"],"exceptions":["java.io.IOException"]}}},"org.apache.had
 oop.yarn.util.Clock":{"name":"org.apache.hadoop.yarn.util.Clock","methods":{"long getTime()":{"name":"getTime","returnType":"long","args":[],"exceptions":[]}}}}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-jar.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-jar.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-jar.list
new file mode 100644
index 0000000..26613d4
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn-jar.list
@@ -0,0 +1,38 @@
+netty-3\.6\.2\.Final[\.\-_].*jar
+leveldbjni-all-1\.8[\.\-_].*jar
+jackson-core-asl-1\.9\.13[\.\-_].*jar
+jackson-xc-1\.9\.13[\.\-_].*jar
+jersey-server-1\.9[\.\-_].*jar
+stax-api-1\.0-2[\.\-_].*jar
+zookeeper-3\.4\.6[\.\-_].*jar
+guice-3\.0[\.\-_].*jar
+jaxb-impl-2\.2\.3-1[\.\-_].*jar
+zookeeper-3\.4\.6.*-tests\.jar
+jersey-client-1\.9[\.\-_].*jar
+commons-cli-1\.2[\.\-_].*jar
+log4j-1\.2\.17[\.\-_].*jar
+jackson-mapper-asl-1\.9\.13[\.\-_].*jar
+guava-11\.0\.2[\.\-_].*jar
+jetty-6\.1\.26[\.\-_].*jar
+commons-logging-1\.1\.3[\.\-_].*jar
+jersey-core-1\.9[\.\-_].*jar
+jersey-guice-1\.9[\.\-_].*jar
+commons-compress-1\.4\.1[\.\-_].*jar
+jettison-1\.1[\.\-_].*jar
+commons-collections-3\.2\.[12][\.\-_].*jar
+xz-1\.0[\.\-_].*jar
+asm-3\.2[\.\-_].*jar
+commons-codec-1\.4[\.\-_].*jar
+aopalliance-1\.0[\.\-_].*jar
+javax\.inject-1[\.\-_].*jar
+commons-lang-2\.6[\.\-_].*jar
+jetty-util-6\.1\.26[\.\-_].*jar
+jsr305-3\.0\.0[\.\-_].*jar
+protobuf-java-2\.5\.0[\.\-_].*jar
+commons-io-2\.4[\.\-_].*jar
+activation-1\.1[\.\-_].*jar
+jersey-json-1\.9[\.\-_].*jar
+jaxb-api-2\.2\.2[\.\-_].*jar
+guice-servlet-3\.0[\.\-_].*jar
+servlet-api-2\.5[\.\-_].*jar
+jackson-jaxrs-1\.9\.13[\.\-_].*jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn.list
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn.list b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn.list
new file mode 100644
index 0000000..bb88005
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/hadoop-yarn.list
@@ -0,0 +1,74 @@
+hadoop-yarn-server-sharedcachemanager.*\.jar
+bin
+bin/mapred
+bin/container-executor
+bin/yarn
+sbin
+sbin/yarn-daemon\.sh
+sbin/yarn-daemons\.sh
+hadoop-yarn-registry-2\.7\.[0-9].*\.jar
+hadoop-yarn-applications-unmanaged-am-launcher-2\.7\.[0-9].*\.jar
+hadoop-yarn-common-2\.7\.[0-9].*\.jar
+hadoop-yarn-server-nodemanager.*\.jar
+hadoop-yarn-server-applicationhistoryservice-2\.7\.[0-9].*\.jar
+hadoop-yarn-server-common.*\.jar
+etc
+etc/hadoop
+hadoop-yarn-server-common-2\.7\.[0-9].*\.jar
+hadoop-yarn-server-tests.*\.jar
+hadoop-yarn-server-resourcemanager.*\.jar
+hadoop-yarn-server-web-proxy.*\.jar
+hadoop-yarn-api-2\.7\.[0-9].*\.jar
+hadoop-yarn-common.*\.jar
+hadoop-yarn-server-web-proxy-2\.7\.[0-9].*\.jar
+hadoop-yarn-applications-distributedshell-2\.7\.[0-9].*\.jar
+hadoop-yarn-server-tests-2\.7\.[0-9].*\.jar
+hadoop-yarn-server-resourcemanager-2\.7\.[0-9].*\.jar
+hadoop-yarn-registry.*\.jar
+hadoop-yarn-server-sharedcachemanager-2\.7\.[0-9].*\.jar
+hadoop-yarn-client-2\.7\.[0-9].*\.jar
+hadoop-yarn-applications-distributedshell.*\.jar
+hadoop-yarn-server-nodemanager-2\.7\.[0-9].*\.jar
+hadoop-yarn-api.*\.jar
+hadoop-yarn-client.*\.jar
+lib
+lib/commons-cli-1\.2.*\.jar
+lib/leveldbjni-all-1\.8.*\.jar
+lib/jaxb-api-2\.2\.2.*\.jar
+lib/jettison-1\.1.*\.jar
+lib/commons-io-2\.4.*\.jar
+lib/jetty-util-6\.1\.26.*\.jar
+lib/jaxb-impl-2\.2\.3-1.*\.jar
+lib/jersey-guice-1\.9.*\.jar
+lib/netty-3\.6\.2\.Final.*\.jar
+lib/jersey-core-1\.9.*\.jar
+lib/jackson-mapper-asl-1\.9\.13.*\.jar
+lib/asm-3\.2.*\.jar
+lib/commons-compress-1\.4\.1.*\.jar
+lib/aopalliance-1\.0.*\.jar
+lib/jackson-xc-1\.9\.13.*\.jar
+lib/jersey-json-1\.9.*\.jar
+lib/commons-codec-1\.4.*\.jar
+lib/jackson-core-asl-1\.9\.13.*\.jar
+lib/servlet-api-2\.5.*\.jar
+lib/jetty-6\.1\.26.*\.jar
+lib/jersey-server-1\.9.*\.jar
+lib/log4j-1\.2\.17.*\.jar
+lib/zookeeper-3\.4\.6.*-tests\.jar
+lib/stax-api-1\.0-2.*\.jar
+lib/jersey-client-1\.9.*\.jar
+lib/xz-1\.0.*\.jar
+lib/zookeeper-3\.4\.6.*\.jar
+lib/activation-1\.1.*\.jar
+lib/javax\.inject-1.*\.jar
+lib/protobuf-java-2\.5\.0.*\.jar
+lib/guice-3\.0.*\.jar
+lib/guava-11\.0\.2.*\.jar
+lib/jsr305-3\.0\.0.*\.jar
+lib/jackson-jaxrs-1\.9\.13.*\.jar
+lib/commons-collections-3\.2\.[1-2].*\.jar
+lib/commons-logging-1\.1\.3.*\.jar
+lib/commons-lang-2\.6.*\.jar
+lib/guice-servlet-3\.0.*\.jar
+hadoop-yarn-server-applicationhistoryservice.*\.jar
+hadoop-yarn-applications-unmanaged-am-launcher.*\.jar
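
The hadoop-yarn.list reference above is one regular expression per line, each describing a file or directory expected under the YARN home directory; the jar lists in the other resource files follow the same convention. A hedged Groovy sketch of the kind of comparison a 'dirstruct' check implies (the logic below is illustrative, not the real test driver, and the fallback path is an assumption):

    // Read the reference patterns, skipping blank lines.
    def patterns = new File('hadoop-yarn.list').readLines().findAll { it.trim() }

    // Collect relative paths that actually exist under the YARN home directory.
    def base = new File(System.getenv('HADOOP_YARN_HOME') ?: '/usr/lib/hadoop-yarn')
    def actual = []
    base.eachFileRecurse { f ->
      actual << (f.absolutePath - base.absolutePath).replaceFirst('^/', '')
    }

    // Every reference pattern should match at least one path found on disk.
    def missing = patterns.findAll { p -> !actual.any { it ==~ p } }
    assert missing.isEmpty() : "Expected paths not found: ${missing}"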

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/testRuntimeSpecConf.groovy
----------------------------------------------------------------------
diff --git a/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/testRuntimeSpecConf.groovy b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/testRuntimeSpecConf.groovy
new file mode 100644
index 0000000..339de4c
--- /dev/null
+++ b/bigtop-tests/smoke-tests/odpi-runtime/src/test/resources/testRuntimeSpecConf.groovy
@@ -0,0 +1,430 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+specs {
+  tests {
+    'HADOOP_EJH1' {
+      name = 'HADOOP_EJH1'
+      type = 'envdir'
+      arguments {
+        envcmd = 'hadoop envvars'
+        variable = 'JAVA_HOME'
+      }
+    }
+    'HADOOP_EC1' {
+      name = 'HADOOP_EC1'
+      type = 'envdir'
+      arguments {
+        envcmd = 'hadoop envvars'
+        variable = 'HADOOP_TOOLS_PATH'
+        donotcheckexistance = true
+      }
+    }
+    'HADOOP_EC2' {
+      name = 'HADOOP_EC2'
+      type = 'envdir'
+      arguments {
+        envcmd = 'hadoop envvars'
+        variable = 'HADOOP_COMMON_HOME'
+      }
+    }
+    'HADOOP_EC3' {
+      name = 'HADOOP_EC3'
+      type = 'envdir'
+      arguments {
+        envcmd = 'hadoop envvars'
+        variable = 'HADOOP_COMMON_DIR'
+        relative = true
+      }
+    }
+    'HADOOP_EC4' {
+      name = 'HADOOP_EC4'
+      type = 'envdir'
+      arguments {
+        envcmd = 'hadoop envvars'
+        variable = 'HADOOP_COMMON_LIB_JARS_DIR'
+        relative = true
+      }
+    }
+    'HADOOP_EC5' {
+      name = 'HADOOP_EC5'
+      type = 'envdir'
+      arguments {
+        envcmd = 'hadoop envvars'
+        variable = 'HADOOP_CONF_DIR'
+      }
+    }
+    'HADOOP_EH1' {
+      name = 'HADOOP_EH1'
+      type = 'envdir'
+      arguments {
+        envcmd = 'hdfs envvars'
+        variable = 'HADOOP_HDFS_HOME'
+      }
+    }
+    'HADOOP_EH2' {
+      name = 'HADOOP_EH2'
+      type = 'envdir'
+      arguments {
+        envcmd = 'hdfs envvars'
+        variable = 'HDFS_DIR'
+        relative = true
+      }
+    }
+    'HADOOP_EH3' {
+      name = 'HADOOP_EH3'
+      type = 'envdir'
+      arguments {
+        envcmd = 'hdfs envvars'
+        variable = 'HDFS_LIB_JARS_DIR'
+        relative = true
+      }
+    }
+    'HADOOP_EY1' {
+      name = 'HADOOP_EY1'
+      type = 'envdir'
+      arguments {
+        envcmd = 'yarn envvars'
+        variable = 'HADOOP_YARN_HOME'
+      }
+    }
+    'HADOOP_EY2' {
+      name = 'HADOOP_EY2'
+      type = 'envdir'
+      arguments {
+        envcmd = 'yarn envvars'
+        variable = 'YARN_DIR'
+        relative = true
+      }
+    }
+    'HADOOP_EY3' {
+      name = 'HADOOP_EY3'
+      type = 'envdir'
+      arguments {
+        envcmd = 'yarn envvars'
+        variable = 'YARN_LIB_JARS_DIR'
+        relative = true
+      }
+    }
+    'HADOOP_EM1' {
+      name = 'HADOOP_EM1'
+      type = 'envdir'
+      arguments {
+        envcmd = 'mapred envvars'
+        variable = 'HADOOP_MAPRED_HOME'
+      }
+    }
+    'HADOOP_EM2' {
+      name = 'HADOOP_EM2'
+      type = 'envdir'
+      arguments {
+        envcmd = 'mapred envvars'
+        variable = 'MAPRED_DIR'
+        relative = true
+      }
+    }
+    'HADOOP_EM3' {
+      name = 'HADOOP_EM3'
+      type = 'envdir'
+      arguments {
+        envcmd = 'mapred envvars'
+        variable = 'MAPRED_LIB_JARS_DIR'
+        relative = true
+      }
+    }
+    'HADOOP_EJH2_HADOOP' {
+      name = 'HADOOP_EJH2_HADOOP'
+      type = 'shell'
+      arguments {
+        command = '[ "${JAVA_HOME}xxx" != "xxx" ] || grep -E "^\\s*export\\s+JAVA_HOME=[\\w/]+" `hadoop envvars | grep HADOOP_CONF_DIR | sed "s|[^=]\\+=\'\\([^\']\\+\\)\'$|\\1|g"`/hadoop-env.sh'
+        message = 'JAVA_HOME is not set'
+      }
+    }
+    'HADOOP_EJH2_YARN' {
+      name = 'HADOOP_EJH2_YARN'
+      type = 'shell'
+      arguments {
+        command = '[ "${JAVA_HOME}xxx" != "xxx" ] || grep -E "^\\s*export\\s+JAVA_HOME=[\\w/]+" `hadoop envvars | grep HADOOP_CONF_DIR | sed "s|[^=]\\+=\'\\([^\']\\+\\)\'$|\\1|g"`/yarn-env.sh'
+        message = 'JAVA_HOME is not set'
+      }
+    }
+    'HADOOP_PLATVER_1' {
+      name = 'HADOOP_PLATVER'
+      type = 'shell'
+      arguments {
+        command = 'hadoop version | head -n 1 | grep -E \'Hadoop\\s+[0-9\\.]+[_\\-][A-Za-z_0-9]+\''
+        message = 'Hadoop\'s version string is not correct'
+      }
+    }
+    'HADOOP_DIRSTRUCT_COMMON' {
+      name = 'HADOOP_DIRSTRUCT_COMMON'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'hadoop envvars'
+        baseDirEnv = 'HADOOP_COMMON_HOME'
+        referenceList = 'hadoop-common.list'
+      }
+    }
+    'HADOOP_DIRSTRUCT_HDFS' {
+      name = 'HADOOP_DIRSTRUCT_HDFS'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'hdfs envvars'
+        baseDirEnv = 'HADOOP_HDFS_HOME'
+        referenceList = 'hadoop-hdfs.list'
+      }
+    }
+    'HADOOP_DIRSTRUCT_MAPRED' {
+      name = 'HADOOP_DIRSTRUCT_MAPRED'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'mapred envvars'
+        baseDirEnv = 'HADOOP_MAPRED_HOME'
+        referenceList = 'hadoop-mapreduce.list'
+      }
+    }
+    'HADOOP_DIRSTRUCT_YARN' {
+      name = 'HADOOP_DIRSTRUCT_YARN'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'yarn envvars'
+        baseDirEnv = 'HADOOP_YARN_HOME'
+        referenceList = 'hadoop-yarn.list'
+      }
+    }
+    'HADOOP_SUBPROJS' {
+      name = 'HADOOP_SUBPROJS'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'hadoop envvars'
+        baseDirEnv = 'HADOOP_COMMON_HOME'
+        referenceList = 'hadoop-subprojs.list'
+      }
+    }
+    'HADOOP_BINCONTENT_COMMON' {
+      name = 'HADOOP_BINCONTENT_COMMON'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'hadoop envvars'
+        baseDirEnv = 'HADOOP_COMMON_HOME'
+        subDir = 'bin'
+        referenceList = 'hadoop-common-bin.list'
+      }
+    }
+    'HADOOP_BINCONTENT_HDFS' {
+      name = 'HADOOP_BINCONTENT_HDFS'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'hdfs envvars'
+        baseDirEnv = 'HADOOP_HDFS_HOME'
+        subDir = 'bin'
+        referenceList = 'hadoop-hdfs-bin.list'
+      }
+    }
+    'HADOOP_BINCONTENT_MAPRED' {
+      name = 'HADOOP_BINCONTENT_MAPRED'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'mapred envvars'
+        baseDirEnv = 'HADOOP_MAPRED_HOME'
+        subDir = 'bin'
+        referenceList = 'hadoop-mapreduce-bin.list'
+      }
+    }
+    'HADOOP_BINCONTENT_YARN' {
+      name = 'HADOOP_BINCONTENT_YARN'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'yarn envvars'
+        baseDirEnv = 'HADOOP_YARN_HOME'
+        subDir = 'bin'
+        referenceList = 'hadoop-yarn-bin.list'
+      }
+    }
+    'HADOOP_LIBJARSCONTENT_COMMON' {
+      name = 'HADOOP_JARCONTENT_COMMON'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'hadoop envvars'
+        baseDirEnv = 'HADOOP_COMMON_HOME'
+        subDirEnv = 'HADOOP_COMMON_LIB_JARS_DIR'
+        referenceList = 'hadoop-common-jar.list'
+      }
+    }
+    'HADOOP_LIBJARSCONTENT_HDFS' {
+      name = 'HADOOP_JARCONTENT_HDFS'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'hdfs envvars'
+        baseDirEnv = 'HADOOP_HDFS_HOME'
+        subDirEnv = 'HDFS_LIB_JARS_DIR'
+        referenceList = 'hadoop-hdfs-jar.list'
+      }
+    }
+    'HADOOP_LIBJARSCONTENT_MAPRED' {
+      name = 'HADOOP_JARCONTENT_MAPRED'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'mapred envvars'
+        baseDirEnv = 'HADOOP_MAPRED_HOME'
+        subDirEnv = 'MAPRED_LIB_JARS_DIR'
+        referenceList = 'hadoop-mapreduce-jar.list'
+      }
+    }
+    'HADOOP_LIBJARSCONTENT_YARN' {
+      name = 'HADOOP_JARCONTENT_YARN'
+      type = 'dirstruct'
+      arguments {
+        envcmd = 'yarn envvars'
+        baseDirEnv = 'HADOOP_YARN_HOME'
+        subDirEnv = 'YARN_LIB_JARS_DIR'
+        referenceList = 'hadoop-yarn-jar.list'
+      }
+    }
+    'HADOOP_GETCONF' {
+      name = 'HADOOP_GETCONF'
+      type = 'shell'
+      arguments {
+        command = '[ `hdfs getconf -confKey dfs.permissions.superusergroup >/dev/null 2>/dev/null; echo $?` == "0" ]'
+        message = 'It\'s not possible to determine key Hadoop configuration values by using ${HADOOP_HDFS_HOME}/bin/hdfs getconf'
+      }
+    }
+    'HADOOP_CNATIVE1' {
+      name = 'HADOOP_CNATIVE1'
+      type = 'shell'
+      arguments {
+        command = 'hadoop checknative -a 2>/dev/null | grep hadoop | grep true'
+        message = 'hadoop-common-project must be built with -Pnative or -Pnative-win'
+      }
+    }
+    'HADOOP_CNATIVE2' {
+      name = 'HADOOP_CNATIVE2'
+      type = 'shell'
+      arguments {
+        command = 'hadoop checknative -a 2>/dev/null | grep snappy | grep true'
+        message = 'hadoop-common-project must be built with -Prequire.snappy'
+      }
+    }
+    'HADOOP_HNATIVE1' {
+      name = 'HADOOP_HNATIVE1'
+      type = 'shell'
+      arguments {
+        command = '[ ! -n ${HADOOP_COMMON_HOME} ] || HADOOP_COMMON_HOME=`hadoop envvars | grep HADOOP_COMMON_HOME | sed "s/.*=\'\\(.*\\)\'/\\1/"`; '+
+            'test -e $HADOOP_COMMON_HOME/lib/native/libhdfs.a'
+        message = 'hadoop-hdfs-project must be built with -Pnative or -Pnative-win'
+      }
+    }
+    'HADOOP_YNATIVE1' {
+      name = 'HADOOP_YNATIVE1'
+      type = 'shell'
+      arguments {
+        command = '[ ! -n ${HADOOP_YARN_HOME} ] || HADOOP_YARN_HOME=`yarn envvars | grep HADOOP_YARN_HOME | sed "s/.*=\'\\(.*\\)\'/\\1/"`; '+
+            'echo $HADOOP_YARN_HOME; test -e $HADOOP_YARN_HOME/bin/container-executor'
+        message = 'hadoop-yarn-project must be built with -Pnative or -Pnative-win'
+      }
+    }
+    'HADOOP_MNATIVE1' {
+      name = 'HADOOP_MNATIVE1'
+      type = 'shell'
+      arguments {
+        command = 'hadoop checknative -a 2>/dev/null | grep snappy | grep true'
+        message = 'hadoop-mapreduce-project must be built with -Prequire.snappy'
+      }
+    }
+    'HADOOP_COMPRESSION' {
+      name = 'HADOOP_COMPRESSION'
+      type = 'shell'
+      arguments {
+        command = '[[ "$(hadoop checknative -a 2>/dev/null | egrep -e ^zlib -e ^snappy | sort -u | grep true | wc -l)" == 2 ]]'
+        message = 'hadoop must be built with -Dcompile.native=true'
+      }
+    }
+    'HADOOP_TOOLS' {
+      name = 'HADOOP_TOOLS'
+      type = 'hadoop_tools'
+      arguments {
+      }
+    }
+    'HADOOP_API1' {
+      name = "HADOOP_API1"
+      type = 'api_examination'
+      arguments {
+        baseDirEnv = 'HADOOP_COMMON_HOME'
+        libDir = 'HADOOP_COMMON_DIR'
+        envcmd = 'hadoop envvars'
+        jar = 'hadoop-common'
+        resourceFile = 'hadoop-common-2.7.3-api-report.json'
+      }
+    }
+    'HADOOP_API2' {
+      name = "HADOOP_API2"
+      type = 'api_examination'
+      arguments {
+        baseDirEnv = 'HADOOP_HDFS_HOME'
+        libDir = 'HDFS_DIR'
+        envcmd = 'hdfs envvars'
+        jar = 'hadoop-hdfs'
+        resourceFile = 'hadoop-hdfs-2.7.3-api-report.json'
+      }
+    }
+    'HADOOP_API3' {
+      name = "HADOOP_API3"
+      type = 'api_examination'
+      arguments {
+        baseDirEnv = 'HADOOP_YARN_HOME'
+        libDir = 'YARN_DIR'
+        envcmd = 'yarn envvars'
+        jar = 'hadoop-yarn-common'
+        resourceFile = 'hadoop-yarn-common-2.7.3-api-report.json'
+      }
+    }
+    'HADOOP_API4' {
+      name = "HADOOP_API4"
+      type = 'api_examination'
+      arguments {
+        baseDirEnv = 'HADOOP_YARN_HOME'
+        libDir = 'YARN_DIR'
+        envcmd = 'yarn envvars'
+        jar = 'hadoop-yarn-client'
+        resourceFile = 'hadoop-yarn-client-2.7.3-api-report.json'
+      }
+    }
+    'HADOOP_API5' {
+      name = "HADOOP_API5"
+      type = 'api_examination'
+      arguments {
+        baseDirEnv = 'HADOOP_YARN_HOME'
+        libDir = 'YARN_DIR'
+        envcmd = 'yarn envvars'
+        jar = 'hadoop-yarn-api'
+        resourceFile = 'hadoop-yarn-api-2.7.3-api-report.json'
+      }
+    }
+    'HADOOP_API6' {
+      name = "HADOOP_API6"
+      type = 'api_examination'
+      arguments {
+        baseDirEnv = 'HADOOP_MAPRED_HOME'
+        libDir = 'MAPRED_DIR'
+        envcmd = 'mapred envvars'
+        jar = 'hadoop-mapreduce-client-core'
+        resourceFile = 'hadoop-mapreduce-client-core-2.7.3-api-report.json'
+      }
+    }
+  }
+}
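
Most entries in the spec configuration above are declarative: 'envdir' entries name an envvars command and a variable it must print (optionally relative, or existence-exempt via donotcheckexistance), 'dirstruct' entries point a reference list at a base directory, and 'shell' entries are pass/fail commands with a failure message. A rough Groovy sketch of what a single 'envdir' check amounts to, assuming envvars prints lines of the form VAR='value' (this is an illustration, not the project's test driver):

    // Run the envvars command declared by the spec entry.
    def output = 'hadoop envvars'.execute().text

    // Pull out the declared variable, e.g. HADOOP_COMMON_HOME='/usr/lib/hadoop'.
    def variable = 'HADOOP_COMMON_HOME'
    def line = output.readLines().find { it.startsWith(variable) }
    assert line : "${variable} is not reported by 'hadoop envvars'"
    def value = (line =~ /'(.*)'/)[0][1]

    // Unless donotcheckexistance is set, the value must name an existing directory
    // (a 'relative' entry would first be resolved against the matching home dir).
    assert new File(value).isDirectory() : "${variable}=${value} does not exist"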

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/README.md
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/README.md b/bigtop-tests/spec-tests/README.md
deleted file mode 100644
index 8fde997..0000000
--- a/bigtop-tests/spec-tests/README.md
+++ /dev/null
@@ -1,48 +0,0 @@
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements. See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to You under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License. You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Test suite to validate Hadoop basic specifications
-==================================================
-
-The test suite is intended to be used as a validation tool to make sure that a
-Hadoop stack derived from Apache Bigtop is still compliant with it. The
-minimalistic way of doing so would be to guarantee compatibility of the
-environment, binaries layouts, certain configuration parameters, and so on.
-
-Validation test suite for the specs is vaguely based on Apache Bigtop iTest and
-consists of two essential parts: a configuration file, communicating the 
-functional commands and expected outcome(s) of it; and the test driver to run
-the commands and compare the results.
- 
-Running the tests
-=================
-
-Tests could be executed by running the following command 
-```
-  gradle :bigtop-tests:spec-tests:runtime:test -Pspec.tests --info
-```
-=======
-consists of two essential parts: a configuration file, communicating the
-functional commands and expected outcome(s) of it; and the test driver to run
-the commands and compare the results.
-
-Running the tests
-=================
-
-Tests could be executed by running the following command
-```
-  gradle :bigtop-tests:spec-tests:runtime:test -Pspec.tests --info
-```
-

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/build.gradle
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/build.gradle b/bigtop-tests/spec-tests/build.gradle
deleted file mode 100644
index b0a6715..0000000
--- a/bigtop-tests/spec-tests/build.gradle
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-subprojects {
-  /**
-   *  Utility function for tests to use to confirm EVN Variables.
-   */
-  ext.checkEnv = { env_vars ->
-    env_vars.each() {
-      def value = System.getenv("${it}")
-      if (value == null || value == "null")
-        throw new GradleException("undeclared env variable: ${it}")
-    }
-  }
-
-  ext.groovyVersion = '1.8.0'
-  ext.hadoopVersion = '2.6.0'
-  // itest needs be greater than or equal to = 1.0.0
-  ext.itestVersion = '1.0.0' // Might need to be able to read an input for alternate version?
-  ext.BIGTOP_HOME   = rootDir
-
-  dependencies {
-    compile group: 'org.apache.bigtop.itest', name: 'itest-common', version: itestVersion, transitive: 'true'
-    //needed to avoid groovy not on classpath error.
-    testCompile group: 'org.codehaus.groovy', name: 'groovy', version: groovyVersion
-    testRuntime project(':bigtop-tests:smoke-tests:logger-test-config')
-  }
-
-  test.doFirst {
-    // TestHadoopExamples and other tests rely on BIGTOP_HOME environment
-    // variable to find some resources. Let's set it up, using ext.BIGTOP_HOME
-    environment ("BIGTOP_HOME", BIGTOP_HOME)
-  }
-
-  test << {
-    println("Now testing...");
-    //todo, add back in 'basic' after BIGTOP-1392 .
-    testLogging {
-      events "passed", "skipped", "failed"
-    }
-  }
-
-  // Let's make sure all system Properties are passed into the forked test JVM
-  tasks.withType(Test) {
-    systemProperties = System.getProperties()
-  }
-  test.dependsOn compileGroovy
-  compileGroovy.dependsOn clean
-}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/build.gradle
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/build.gradle b/bigtop-tests/spec-tests/runtime/build.gradle
deleted file mode 100644
index 97e3635..0000000
--- a/bigtop-tests/spec-tests/runtime/build.gradle
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-def junitVersion = '4.11'
-
-apply plugin: 'java'
-
-repositories {
-  maven {
-    url "http://conjars.org/repo/"
-  }
-}
-dependencies {
-  compile group: 'junit', name: 'junit', version: junitVersion, transitive: 'true'
-  compile group: 'commons-logging', name: 'commons-logging', version: '1.1.3'
-  compile group: 'org.apache.commons', name: 'commons-exec', version: '1.3'
-  compile group: 'org.apache.hive', name: 'hive-jdbc', version: '1.2.1'
-  compile group: 'org.apache.hive', name: 'hive-metastore', version: '1.2.1'
-  compile group: 'org.apache.hive', name: 'hive-common', version: '1.2.1'
-  compile group: 'org.apache.thrift', name: 'libfb303', version: '0.9.3'
-  compile group: 'org.apache.thrift', name: 'libthrift', version: '0.9.3'
-  compile group: 'org.apache.hadoop', name: 'hadoop-common', version: '2.7.2'
-  compile group: 'org.apache.hive.hcatalog', name: 'hive-hcatalog-core', version: '1.2.1'
-  testCompile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-core', version: '2.7.2'
-  compile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-jobclient', version: '2.7.2'
-  testCompile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-common', version: '2.7.2'
-  testCompile group: 'org.apache.hadoop', name: 'hadoop-hdfs', version: '2.7.2'
-  testCompile group: 'org.apache.hive', name: 'hive-exec', version: '1.2.1'
-  testCompile "junit:junit:4.11"
-  if (System.env.HADOOP_CONF_DIR) testRuntime files(System.env.HADOOP_CONF_DIR)
-}
-
-jar {
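-    // Bundle the hive/hcatalog and libfb303 classes into this jar, presumably so the
-    // HCatalog MR job can be submitted without additional classpath setup.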
-    from {
-        (configurations.runtime).grep{it.toString() =~ /(hive|libfb303)-.*[jw]ar$/}.collect {
-              zipTree(it)
-        }
-    }
-
-    exclude 'META-INF/*.RSA', 'META-INF/*.SF','META-INF/*.DSA'
-}
-
-test {
-  // Change the default location where test data is picked up
-  systemProperty 'test.resources.dir', "${buildDir}/resources/test/"
-  systemProperty 'odpi.test.hive.hcat.job.jar', jar.archivePath
-  systemProperty 'odpi.test.hive.hcat.core.jar', (configurations.runtime).find { it.toString() =~ /hive-hcatalog-core-.*jar$/ }
-}
-test.dependsOn jar

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
deleted file mode 100644
index d95c010..0000000
--- a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hadoop/ApiExaminer.java
+++ /dev/null
@@ -1,485 +0,0 @@
-/**
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.odpi.specs.runtime.hadoop;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.GnuParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Options;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.codehaus.jackson.annotate.JsonIgnore;
-import org.codehaus.jackson.map.ObjectMapper;
-
-import java.io.File;
-import java.io.IOException;
-import java.lang.reflect.Method;
-import java.util.ArrayList;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.jar.JarEntry;
-import java.util.jar.JarFile;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**
- * A tool that generates API conformance tests for Hadoop libraries
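- * <p>
- * Typical invocations, matching the options handled in main() (classpath and paths are illustrative):
- *   java -cp CLASSPATH org.odpi.specs.runtime.hadoop.ApiExaminer -p OUTPUT_DIR -j HADOOP_JAR   (prepare a spec report)
- *   java -cp CLASSPATH org.odpi.specs.runtime.hadoop.ApiExaminer -c REPORT_JSON -j HADOOP_JAR  (compare a jar against a report)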
- */
-public class ApiExaminer {
-
-  private static final Log LOG = LogFactory.getLog(ApiExaminer.class.getName());
-
-  static private Set<String> unloadableClasses;
-
-  private List<String> errors;
-  private List<String> warnings;
-
-  static {
-    unloadableClasses = new HashSet<>();
-    unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsMapping");
-    unloadableClasses.add("org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping");
-    unloadableClasses.add("org.apache.hadoop.io.compress.lz4.Lz4Compressor");
-    unloadableClasses.add("org.apache.hadoop.record.compiler.ant.RccTask");
-
-  }
-
-  public static void main(String[] args) {
-    Options options = new Options();
-
-    options.addOption("c", "compare", true,
-        "Compare against a spec, argument is the json file containing spec");
-    options.addOption("h", "help", false, "You're looking at it");
-    options.addOption("j", "jar", true, "Jar to examine");
-    options.addOption("p", "prepare-spec", true,
-        "Prepare the spec, argument is the directory to write the spec to");
-
-    try {
-      CommandLine cli = new GnuParser().parse(options, args);
-
-      if (cli.hasOption('h')) {
-        usage(options);
-        return;
-      }
-
-      if ((!cli.hasOption('c') && !cli.hasOption('p')) ||
-          (cli.hasOption('c') && cli.hasOption('p'))) {
-        System.err.println("You must choose either -c or -p");
-        usage(options);
-        return;
-      }
-
-      if (!cli.hasOption('j')) {
-        System.err.println("You must specify the jar to prepare or compare");
-        usage(options);
-        return;
-      }
-
-      String jar = cli.getOptionValue('j');
-      ApiExaminer examiner = new ApiExaminer();
-
-      if (cli.hasOption('c')) {
-        examiner.compareAgainstStandard(cli.getOptionValue('c'), jar);
-      } else if (cli.hasOption('p')) {
-        examiner.prepareExpected(jar, cli.getOptionValue('p'));
-      }
-    } catch (Exception e) {
-      System.err.println("Received exception while processing");
-      e.printStackTrace();
-    }
-  }
-
-  private static void usage(Options options) {
-    HelpFormatter help = new HelpFormatter();
-    help.printHelp("api-examiner", options);
-
-  }
-
-  private ApiExaminer() {
-  }
-
-  private void prepareExpected(String jarFile, String outputDir) throws IOException,
-      ClassNotFoundException {
-    JarInfo jarInfo = new JarInfo(jarFile, this);
-    jarInfo.dumpToFile(new File(outputDir));
-  }
-
-  private void compareAgainstStandard(String json, String jarFile) throws IOException,
-      ClassNotFoundException {
-    errors = new ArrayList<>();
-    warnings = new ArrayList<>();
-    JarInfo underTest = new JarInfo(jarFile, this);
-    JarInfo standard = jarInfoFromFile(new File(json));
-    standard.compareAndReport(underTest);
-
-    if (errors.size() > 0) {
-      System.err.println("Found " + errors.size() + " incompatibilities:");
-      for (String error : errors) {
-        System.err.println(error);
-      }
-    }
-
-    if (warnings.size() > 0) {
-      System.err.println("Found " + warnings.size() + " possible issues: ");
-      for (String warning : warnings) {
-        System.err.println(warning);
-      }
-    }
-
-
-  }
-
-  private JarInfo jarInfoFromFile(File inputFile) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    JarInfo jarInfo = mapper.readValue(inputFile, JarInfo.class);
-    jarInfo.patchUpClassBackPointers(this);
-    return jarInfo;
-  }
-
-  private static class JarInfo {
-    String name;
-    String version;
-    ApiExaminer container;
-    Map<String, ClassInfo> classes;
-
-    // For use by Jackson
-    public JarInfo() {
-
-    }
-
-    JarInfo(String jarFile, ApiExaminer container) throws IOException, ClassNotFoundException {
-      this.container = container;
-      LOG.info("Processing jar " + jarFile);
-      File f = new File(jarFile);
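-      // Jar file names are expected to look like hadoop-<name>-<x.y.z>.jar, e.g. hadoop-common-2.7.3.jar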
-      Pattern pattern = Pattern.compile("(hadoop-[a-z\\-]+)-([0-9]\\.[0-9]\\.[0-9]).*");
-      Matcher matcher = pattern.matcher(f.getName());
-      if (!matcher.matches()) {
-        String msg = "Unable to determine name and version from " + f.getName();
-        LOG.error(msg);
-        throw new RuntimeException(msg);
-      }
-      name = matcher.group(1);
-      version = matcher.group(2);
-      classes = new HashMap<>();
-
-      JarFile jar = new JarFile(jarFile);
-      Enumeration<JarEntry> entries = jar.entries();
-      while (entries.hasMoreElements()) {
-        String name = entries.nextElement().getName();
-        if (name.endsWith(".class")) {
-          name = name.substring(0, name.length() - 6);
-          name = name.replace('/', '.');
-          if (!unloadableClasses.contains(name)) {
-            LOG.debug("Processing class " + name);
-            Class<?> clazz = Class.forName(name);
-            if (clazz.getAnnotation(InterfaceAudience.Public.class) != null &&
-                clazz.getAnnotation(InterfaceStability.Stable.class) != null) {
-              classes.put(name, new ClassInfo(this, clazz));
-            }
-          }
-        }
-      }
-    }
-
-    public String getName() {
-      return name;
-    }
-
-    public void setName(String name) {
-      this.name = name;
-    }
-
-    public String getVersion() {
-      return version;
-    }
-
-    public void setVersion(String version) {
-      this.version = version;
-    }
-
-    public Map<String, ClassInfo> getClasses() {
-      return classes;
-    }
-
-    public void setClasses(Map<String, ClassInfo> classes) {
-      this.classes = classes;
-    }
-
-    void compareAndReport(JarInfo underTest) {
-      Set<ClassInfo> underTestClasses = new HashSet<>(underTest.classes.values());
-      for (ClassInfo classInfo : classes.values()) {
-        if (underTestClasses.contains(classInfo)) {
-          classInfo.compareAndReport(underTest.classes.get(classInfo.name));
-          underTestClasses.remove(classInfo);
-        } else {
-          container.errors.add(underTest + " does not contain class " + classInfo);
-        }
-      }
-
-      if (underTestClasses.size() > 0) {
-        for (ClassInfo extra : underTestClasses) {
-          container.warnings.add(underTest + " contains extra class " + extra);
-        }
-      }
-    }
-
-    void dumpToFile(File outputDir) throws IOException {
-      File output = new File(outputDir, name + "-" + version + "-api-report.json");
-      ObjectMapper mapper = new ObjectMapper();
-      mapper.writeValue(output, this);
-    }
-
-    void patchUpClassBackPointers(ApiExaminer container) {
-      this.container = container;
-      for (ClassInfo classInfo : classes.values()) {
-        classInfo.setJar(this);
-        classInfo.patchUpBackMethodBackPointers();
-      }
-    }
-
-    @Override
-    public boolean equals(Object other) {
-      if (!(other instanceof JarInfo)) return false;
-      JarInfo that = (JarInfo)other;
-      return name.equals(that.name) && version.equals(that.version);
-    }
-
-    @Override
-    public String toString() {
-      return name + "-" + version;
-    }
-  }
-
-  private static class ClassInfo {
-    @JsonIgnore JarInfo jar;
-    String name;
-    Map<String, MethodInfo> methods;
-
-    // For use by Jackson
-    public ClassInfo() {
-
-    }
-
-    ClassInfo(JarInfo jar, Class<?> clazz) {
-      this.jar = jar;
-      this.name = clazz.getName();
-      methods = new HashMap<>();
-
-      for (Method method : clazz.getMethods()) {
-        if (method.getDeclaringClass().equals(clazz)) {
-          LOG.debug("Processing method " + method.getName());
-          MethodInfo mi = new MethodInfo(this, method);
-          methods.put(mi.toString(), mi);
-        }
-      }
-    }
-
-    public JarInfo getJar() {
-      return jar;
-    }
-
-    public void setJar(JarInfo jar) {
-      this.jar = jar;
-    }
-
-    public String getName() {
-      return name;
-    }
-
-    public void setName(String name) {
-      this.name = name;
-    }
-
-    public Map<String, MethodInfo> getMethods() {
-      return methods;
-    }
-
-    public void setMethods(Map<String, MethodInfo> methods) {
-      this.methods = methods;
-    }
-
-    void compareAndReport(ClassInfo underTest) {
-      // Make a copy so we can remove them as we match them, making it easy to find additional ones
-      Set<MethodInfo> underTestMethods = new HashSet<>(underTest.methods.values());
-      for (MethodInfo methodInfo : methods.values()) {
-        if (underTestMethods.contains(methodInfo)) {
-          methodInfo.compareAndReport(underTest.methods.get(methodInfo.toString()));
-          underTestMethods.remove(methodInfo);
-        } else {
-          jar.container.errors.add(underTest + " does not contain method " + methodInfo);
-        }
-      }
-
-      if (underTestMethods.size() > 0) {
-        for (MethodInfo extra : underTestMethods) {
-          jar.container.warnings.add(underTest + " contains extra method " + extra);
-        }
-      }
-    }
-
-    void patchUpBackMethodBackPointers() {
-      for (MethodInfo methodInfo : methods.values()) methodInfo.setContainingClass(this);
-    }
-
-    @Override
-    public boolean equals(Object other) {
-      if (!(other instanceof ClassInfo)) return false;
-      ClassInfo that = (ClassInfo)other;
-      return name.equals(that.name);  // Classes can be compared just on names
-    }
-
-    @Override
-    public int hashCode() {
-      return name.hashCode();
-    }
-
-    @Override
-    public String toString() {
-      return jar + " " + name;
-    }
-  }
-
-  private static class MethodInfo {
-    @JsonIgnore ClassInfo containingClass;
-    String name;
-    String returnType;
-    List<String> args;
-    Set<String> exceptions;
-
-    // For use by Jackson
-    public MethodInfo() {
-
-    }
-
-    MethodInfo(ClassInfo containingClass, Method method) {
-      this.containingClass = containingClass;
-      this.name = method.getName();
-      args = new ArrayList<>();
-      for (Class<?> argClass : method.getParameterTypes()) {
-        args.add(argClass.getName());
-      }
-      returnType = method.getReturnType().getName();
-      exceptions = new HashSet<>();
-      for (Class<?> exception : method.getExceptionTypes()) {
-        exceptions.add(exception.getName());
-      }
-    }
-
-    public ClassInfo getContainingClass() {
-      return containingClass;
-    }
-
-    public void setContainingClass(ClassInfo containingClass) {
-      this.containingClass = containingClass;
-    }
-
-    public String getName() {
-      return name;
-    }
-
-    public void setName(String name) {
-      this.name = name;
-    }
-
-    public String getReturnType() {
-      return returnType;
-    }
-
-    public void setReturnType(String returnType) {
-      this.returnType = returnType;
-    }
-
-    public List<String> getArgs() {
-      return args;
-    }
-
-    public void setArgs(List<String> args) {
-      this.args = args;
-    }
-
-    public Set<String> getExceptions() {
-      return exceptions;
-    }
-
-    public void setExceptions(Set<String> exceptions) {
-      this.exceptions = exceptions;
-    }
-
-    void compareAndReport(MethodInfo underTest) {
-      // Check to see if they've added or removed exceptions
-      // Make a copy so I can remove them as I check them off and easily find any that have been
-      // added.
-      Set<String> underTestExceptions = new HashSet<>(underTest.exceptions);
-      for (String exception : exceptions) {
-        if (underTest.exceptions.contains(exception)) {
-          underTestExceptions.remove(exception);
-        } else {
-          containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
-              underTest.containingClass + "." + name + " removes exception " + exception);
-        }
-      }
-      if (underTestExceptions.size() > 0) {
-        for (String underTestException : underTestExceptions) {
-          containingClass.jar.container.warnings.add(underTest.containingClass.jar + " " +
-              underTest.containingClass + "." + name + " adds exception " + underTestException);
-        }
-      }
-    }
-
-    @Override
-    public boolean equals(Object other) {
-      if (!(other instanceof MethodInfo)) return false;
-      MethodInfo that = (MethodInfo)other;
-
-      return containingClass.equals(that.containingClass) && name.equals(that.name) &&
-          returnType.equals(that.returnType) && args.equals(that.args);
-    }
-
-    @Override
-    public int hashCode() {
-      return ((containingClass.hashCode() * 31 + name.hashCode()) * 31 + returnType.hashCode()) * 31 +
-          args.hashCode();
-    }
-
-    @Override
-    public String toString() {
-      StringBuilder buf = new StringBuilder(returnType)
-          .append(" ")
-          .append(name)
-          .append('(');
-      boolean first = true;
-      for (String arg : args) {
-        if (first) first = false;
-        else buf.append(", ");
-        buf.append(arg);
-      }
-      buf.append(")");
-      if (exceptions.size() > 0) {
-        buf.append(" throws ");
-        first = true;
-        for (String exception : exceptions) {
-          if (first) first = false;
-          else buf.append(", ");
-          buf.append(exception);
-        }
-      }
-      return buf.toString();
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java b/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
deleted file mode 100644
index 4110d5d..0000000
--- a/bigtop-tests/spec-tests/runtime/src/main/java/org/odpi/specs/runtime/hive/HCatalogMR.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.odpi.specs.runtime.hive;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.util.GenericOptionsParser;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.hive.hcatalog.data.DefaultHCatRecord;
-import org.apache.hive.hcatalog.data.HCatRecord;
-import org.apache.hive.hcatalog.data.schema.HCatSchema;
-import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils;
-import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
-import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
-import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
-
-import java.io.IOException;
-import java.net.URI;
-import java.util.StringTokenizer;
-
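-/**
- * A word-count MapReduce job that reads from and writes to HCatalog tables; it is
- * used by the Hive/HCatalog runtime tests (see the odpi.test.hive.hcat.job.jar
- * property in the runtime build file). Arguments parsed in run(), with values
- * supplied by the caller: -it <input table>, -ot <output table>,
- * -is <input schema>, -os <output schema>.
- */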
-public class HCatalogMR extends Configured implements Tool {
-  private final static String INPUT_SCHEMA = "odpi.test.hcat.schema.input";
-  private final static String OUTPUT_SCHEMA = "odpi.test.hcat.schema.output";
-
-  @Override
-  public int run(String[] args) throws Exception {
-    String inputTable = null;
-    String outputTable = null;
-    String inputSchemaStr = null;
-    String outputSchemaStr = null;
-    for (int i = 0; i < args.length; i++) {
-      if (args[i].equalsIgnoreCase("-it")) {
-        inputTable = args[i + 1];
-      } else if (args[i].equalsIgnoreCase("-ot")) {
-        outputTable = args[i + 1];
-      } else if (args[i].equalsIgnoreCase("-is")) {
-        inputSchemaStr = args[i + 1];
-      } else if (args[i].equalsIgnoreCase("-os")) {
-        outputSchemaStr = args[i + 1];
-      }
-    }
-
-    Configuration conf = getConf();
-    args = new GenericOptionsParser(conf, args).getRemainingArgs();
-
-    conf.set(INPUT_SCHEMA, inputSchemaStr);
-    conf.set(OUTPUT_SCHEMA, outputSchemaStr);
-
-    Job job = new Job(conf, "odpi_hcat_test");
-    HCatInputFormat.setInput(job, "default", inputTable);
-
-    job.setInputFormatClass(HCatInputFormat.class);
-    job.setJarByClass(HCatalogMR.class);
-    job.setMapperClass(Map.class);
-    job.setReducerClass(Reduce.class);
-    job.setMapOutputKeyClass(Text.class);
-    job.setMapOutputValueClass(IntWritable.class);
-    job.setOutputKeyClass(WritableComparable.class);
-    job.setOutputValueClass(HCatRecord.class);
-    HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", outputTable, null));
-    HCatOutputFormat.setSchema(job, HCatSchemaUtils.getHCatSchema(outputSchemaStr));
-    job.setOutputFormatClass(HCatOutputFormat.class);
-
-    return job.waitForCompletion(true) ? 0 : 1;
-
-
-  }
-  public static class Map extends Mapper<WritableComparable,
-          HCatRecord, Text, IntWritable> {
-    private final static IntWritable one = new IntWritable(1);
-    private Text word = new Text();
-    private HCatSchema inputSchema = null;
-
-    @Override
-    protected void map(WritableComparable key, HCatRecord value, Context context)
-        throws IOException, InterruptedException {
-      if (inputSchema == null) {
-        inputSchema =
-            HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(INPUT_SCHEMA));
-      }
-      String line = value.getString("line", inputSchema);
-      StringTokenizer tokenizer = new StringTokenizer(line);
-      while (tokenizer.hasMoreTokens()) {
-        word.set(tokenizer.nextToken());
-        context.write(word, one);
-      }
-    }
-  }
-
-  public static class Reduce extends Reducer<Text, IntWritable, WritableComparable, HCatRecord> {
-    private HCatSchema outputSchema = null;
-
-    @Override
-    protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws
-        IOException, InterruptedException {
-      if (outputSchema == null) {
-        outputSchema =
-            HCatSchemaUtils.getHCatSchema(context.getConfiguration().get(OUTPUT_SCHEMA));
-      }
-      int sum = 0;
-      for (IntWritable i : values) {
-        sum += i.get();
-      }
-      HCatRecord output = new DefaultHCatRecord(2);
-      output.set("word", outputSchema, key);
-      output.set("count", outputSchema, sum);
-      context.write(null, output);
-    }
-  }
-
-  public static void main(String[] args) throws Exception {
-    int exitCode = ToolRunner.run(new HCatalogMR(), args);
-    System.exit(exitCode);
-  }
- }

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5e342c45/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh
----------------------------------------------------------------------
diff --git a/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh b/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh
deleted file mode 100755
index 8c9ab5e..0000000
--- a/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env bash
-
-############################################################################
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-############################################################################
-
-############################################################################
-# This script is used to generate the hadoop-*-api-report.json files in the
-# src/test/resources directory.  To use it, you will first need to download an
-# Apache binary distribution of Hadoop and set APACHE_HADOOP_DIR to the
-# directory where you untar that distribution.  You will then need to set
-# BIGTOP_HOME to the directory where your bigtop source is located.  Then
-# run this script for each of the jars you want to generate a report for.
-# The arguments passed to this script should be -p <outputdir> -j <jarfile>
-# where outputdir is the directory you'd like to write the report to and
-# jarfile is the full path of the jar to generate the report for.  Reports
-# should be generated for the following jars: hadoop-common, hadoop-hdfs,
-# hadoop-yarn-common, hadoop-yarn-client, hadoop-yarn-api, and
-# hadoop-mapreduce-client-core
-#
-# Example usage:
-# export APACHE_HADOOP_DIR=/tmp/hadoop-2.7.3
-# export BIGTOP_HOME=/home/me/git/bigtop
-# $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/src/main/resources/api-examiner-prep.sh -j $APACHE_HADOOP_DIR/share/hadoop/common/hadoop-common-2.7.3.jar -p $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/src/test/resources
-#
-# The resulting reports should be committed to git.  This script only needs
-# to be run once per ODPi release.
-############################################################################
-
-
-if [ "x${APACHE_HADOOP_DIR}" = "x" ]
-then
-    echo "You must set APACHE_HADOOP_DIR to the directory you have placed the Apache Hadoop binary distribution in"
-    exit 1
-fi
-
-if [ "x${BIGTOP_HOME}" = "x" ]
-then
-    echo "You must set BIGTOP_HOME to the root directory for your bigtop source"
-    exit 1
-fi
-
-for jar in `find $BIGTOP_HOME/bigtop-tests/spec-tests/runtime/build/libs/ -name \*.jar`
-do
-    CLASSPATH=$CLASSPATH:$jar
-done
-
-for jar in `find $APACHE_HADOOP_DIR -name \*.jar`
-do
-    CLASSPATH=$CLASSPATH:$jar
-done
-
-java -cp $CLASSPATH org.odpi.specs.runtime.hadoop.ApiExaminer $@
-