Posted to hdfs-dev@hadoop.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2011/10/30 14:10:00 UTC

Build failed in Jenkins: Hadoop-Hdfs-0.23-Build #55

See <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/55/changes>

Changes:

[szetszwo] Revert 1190680 for HDFS-2509.

------------------------------------------
[...truncated 9528 lines...]
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicyDefault$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/blockmanagement/HeartbeatManager$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeDescriptor$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/blockmanagement/UnderReplicatedBlocks$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/common/Storage$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/common/HdfsServerConstants$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/common/UpgradeStatusReport$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/common/JspHelper$1NodeComapare.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/common/JspHelper$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/DataStorage$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceScanner$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceStorage$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/FSDataset$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/FSDataset$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/browseBlock_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/BlockReceiver$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/DataStorage$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/DataNode$3.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/DataStorage$4.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/DataNode$BlockPoolManager$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/DataStorage$3.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/browseDirectory_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/tail_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/DataStorage$5.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/DataNode$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/DataXceiverServer$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/FSDatasetAsyncDiskService$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods$4.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods$3.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods$3$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSEditLog$6.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSEditLog$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FileJournalManager$EditLogFile$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/dfsnodelist_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSEditLog$7.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/ClusterJspHelper$NamenodeMXBeanHelper$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FileDataServlet$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/block_005finfo_005fxml_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer$1$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/BackupNode$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/GetDelegationTokenServlet$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/CancelDelegationTokenServlet$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode$4.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSEditLog$3.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/corrupt_005freplicas_005fxml_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FsckServlet$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/ClusterJspHelper$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSNamesystem$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/BackupImage$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/EditLogFileInputStream$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/NameNode$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/decommission_jsp.class]>
  [javadoc] JDiff: finished (took 0s, not including scanning the source files).
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/ContentSummaryServlet$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSEditLog$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/status_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSEditLog$5.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/GetImageServlet$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/GetImageServlet$1$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/LeaseManager$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/nn_005fbrowsedfscontent_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSImagePreTransactionalStorageInspector$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/RenewDelegationTokenServlet$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSImage$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSImageSerialization$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/SerialNumberManager$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/dfshealth_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/dfsclusterhealth_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp$BlockTwo$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/corrupt_005ffiles_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/UpgradeManagerNamenode$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSEditLog$4.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode$3.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods$3.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods$5.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods$6.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods$4.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/UpgradeCommand$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/KeyUpdateCommand$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/DatanodeCommand$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/RemoteEditLog$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/ReplicaRecoveryInfo$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/BlockRecoveryCommand$RecoveringBlock$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/DatanodeCommand$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/NamenodeRegistration$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/BlockCommand$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/DatanodeRegistration$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/BlockRecoveryCommand$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/BalancerBandwidthCommand$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/CheckpointCommand$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/NamenodeCommand$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/tools/GetConf$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/tools/DFSck$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/tools/offlineEditsViewer/EditsLoaderCurrent$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsImageVisitor$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionVisitor$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/util/CyclicIteration$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/util/LightWeightGSet$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/web/WebHdfsFileSystem$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/web/ParamFilter$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/web/KerberosUgiAuthenticator$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/web/resources/Param$1.class]>
  [javadoc] [done in 1877 ms]
  [javadoc] Generating Javadoc
  [javadoc] Javadoc execution
  [javadoc] javadoc: error - Illegal package name: ""
  [javadoc] javadoc: error - File not found: "<https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/dev-support/jdiff/Null.java">
  [javadoc] Loading source files for package org.apache.hadoop.fs...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.protocol...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.protocol.datatransfer...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.protocol.proto...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.security.token.block...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.security.token.delegation...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.server.balancer...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.server.blockmanagement...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.server.common...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.server.datanode...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.server.datanode.metrics...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.server.datanode.web.resources...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.server.namenode...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.server.namenode.metrics...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.server.namenode.web.resources...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.server.protocol...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.tools...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.tools.offlineEditsViewer...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.tools.offlineImageViewer...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.util...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.web...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.web.resources...
  [javadoc] 2 errors
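
The two javadoc errors above come out of the JDiff step: an empty string is apparently being passed where a package or source argument is expected ("Illegal package name"), and the placeholder source file dev-support/jdiff/Null.java is missing from the workspace. A hedged check one could run on the slave (workspace path taken from the test output further down; whether the file should be checked in or generated is an assumption):

    # sketch: confirm the placeholder the jdiff target references actually exists
    cd /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk
    ls hadoop-hdfs-project/hadoop-hdfs/dev-support/jdiff/Null.java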
     [xslt] Processing <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/findbugsXml.xml> to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/site/findbugs.html>
     [xslt] Loading stylesheet /home/jenkins/tools/findbugs/latest/src/xsl/default.xsl
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-antrun-plugin:1.6:run (pre-dist) @ hadoop-hdfs ---
[INFO] Executing tasks

main:
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-antrun-plugin:1.6:run (xprepare-package-hadoop-daemon) @ hadoop-hdfs ---
[INFO] Executing tasks

main:
      [get] Destination already exists (skipping): <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/downloads/commons-daemon-1.0.3-bin-linux-i686.tar.gz>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/commons-daemon.staging>
    [untar] Expanding: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/downloads/commons-daemon-1.0.3-bin-linux-i686.tar.gz> into <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/commons-daemon.staging>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-0.23.0-SNAPSHOT/libexec>
     [copy] Copying <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/commons-daemon.staging/jsvc> to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-0.23.0-SNAPSHOT/libexec/jsvc>
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-jar-plugin:2.3.1:jar (default-jar) @ hadoop-hdfs ---
[INFO] 
[INFO] --- maven-site-plugin:3.0:attach-descriptor (attach-descriptor) @ hadoop-hdfs ---
[INFO] 
[INFO] --- maven-assembly-plugin:2.2-beta-3:single (src-dist) @ hadoop-hdfs ---
[INFO] Reading assembly descriptor: hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS ................................ FAILURE [2:14.374s]
[INFO] Apache Hadoop HDFS Project ........................ SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 2:14.868s
[INFO] Finished at: Sun Oct 30 11:36:20 UTC 2011
[INFO] Final Memory: 44M/410M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-assembly-plugin:2.2-beta-3:single (src-dist) on project hadoop-hdfs: Error reading assemblies: Error locating assembly descriptor: hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml
[ERROR] 
[ERROR] [1] [INFO] Searching for file location: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml>
[ERROR] 
[ERROR] [2] [INFO] File: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml> does not exist.
[ERROR] 
[ERROR] [3] [INFO] Invalid artifact specification: 'hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml'. Must contain at least three fields, separated by ':'.
[ERROR] 
[ERROR] [4] [INFO] Failed to resolve classpath resource: /assemblies/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml from classloader: ClassRealm[plugin>org.apache.maven.plugins:maven-assembly-plugin:2.2-beta-3, parent: sun.misc.Launcher$AppClassLoader@126b249]
[ERROR] 
[ERROR] [5] [INFO] Failed to resolve classpath resource: hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml from classloader: ClassRealm[plugin>org.apache.maven.plugins:maven-assembly-plugin:2.2-beta-3, parent: sun.misc.Launcher$AppClassLoader@126b249]
[ERROR] 
[ERROR] [6] [INFO] File: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml> does not exist.
[ERROR] 
[ERROR] [7] [INFO] Building URL from location: hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml
[ERROR] Error:
[ERROR] java.net.MalformedURLException: no protocol: hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml
[ERROR] at java.net.URL.<init>(URL.java:567)
[ERROR] at java.net.URL.<init>(URL.java:464)
[ERROR] at java.net.URL.<init>(URL.java:413)
[ERROR] at org.apache.maven.shared.io.location.URLLocatorStrategy.resolve(URLLocatorStrategy.java:54)
[ERROR] at org.apache.maven.shared.io.location.Locator.resolve(Locator.java:81)
[ERROR] at org.apache.maven.plugin.assembly.io.DefaultAssemblyReader.addAssemblyFromDescriptor(DefaultAssemblyReader.java:309)
[ERROR] at org.apache.maven.plugin.assembly.io.DefaultAssemblyReader.readAssemblies(DefaultAssemblyReader.java:140)
[ERROR] at org.apache.maven.plugin.assembly.mojos.AbstractAssemblyMojo.execute(AbstractAssemblyMojo.java:328)
[ERROR] at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo(DefaultBuildPluginManager.java:101)
[ERROR] at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:209)
[ERROR] at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:153)
[ERROR] at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:145)
[ERROR] at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject(LifecycleModuleBuilder.java:84)
[ERROR] at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject(LifecycleModuleBuilder.java:59)
[ERROR] at org.apache.maven.lifecycle.internal.LifecycleStarter.singleThreadedBuild(LifecycleStarter.java:183)
[ERROR] at org.apache.maven.lifecycle.internal.LifecycleStarter.execute(LifecycleStarter.java:161)
[ERROR] at org.apache.maven.DefaultMaven.doExecute(DefaultMaven.java:319)
[ERROR] at org.apache.maven.DefaultMaven.execute(DefaultMaven.java:156)
[ERROR] at org.apache.maven.cli.MavenCli.execute(MavenCli.java:537)
[ERROR] at org.apache.maven.cli.MavenCli.doMain(MavenCli.java:196)
[ERROR] at org.apache.maven.cli.MavenCli.main(MavenCli.java:141)
[ERROR] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[ERROR] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
[ERROR] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
[ERROR] at java.lang.reflect.Method.invoke(Method.java:597)
[ERROR] at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced(Launcher.java:290)
[ERROR] at org.codehaus.plexus.classworlds.launcher.Launcher.launch(Launcher.java:230)
[ERROR] at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode(Launcher.java:409)
[ERROR] at org.codehaus.plexus.classworlds.launcher.Launcher.main(Launcher.java:352)
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoExecutionException
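
The assembly failure reads as a path-resolution problem rather than a genuinely missing file: the locator attempts [1]-[7] above try the descriptor as a module-relative file, an artifact coordinate, a classpath resource, and finally a bare URL, and the relative path hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml resolves against the hadoop-hdfs module directory instead of the source-tree root. A hedged way to see the mismatch from the workspace (it is an assumption that the descriptor lives at the tree root):

    # sketch: the descriptor presumably exists at the tree root, but not at the
    # module-relative path the plugin tried in attempts [1] and [2] above
    cd /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk
    ls hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml
    ls hadoop-hdfs-project/hadoop-hdfs/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml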
+ /home/jenkins/tools/maven/latest/bin/mvn test -Dmaven.test.failure.ignore=true -Pclover -DcloverLicenseLocation=/home/jenkins/tools/clover/latest/lib/clover.license
Build step 'Execute shell' marked build as failure
Archiving artifacts
Publishing Clover coverage report...
Clover xml file does not exist in: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/clover> called: clover.xml and will not be copied to: /home/hudson/hudson/jobs/Hadoop-Hdfs-0.23-Build/builds/2011-10-30_11-31-21/clover.xml
Could not find 'trunk/hadoop-hdfs-project/hadoop-hdfs/target/clover/clover.xml'.  Did you generate the XML report for Clover?
Recording test results
Publishing Javadoc
Recording fingerprints
Updating HDFS-2509


Hadoop-Hdfs-0.23-Build - Build # 56 - Still Failing

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/56/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 9742 lines...]
[ERROR] Error:
[ERROR] java.net.MalformedURLException: no protocol: hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml
[ERROR] at java.net.URL.<init>(URL.java:567)
[ERROR] at java.net.URL.<init>(URL.java:464)
[ERROR] at java.net.URL.<init>(URL.java:413)
[ERROR] at org.apache.maven.shared.io.location.URLLocatorStrategy.resolve(URLLocatorStrategy.java:54)
[ERROR] at org.apache.maven.shared.io.location.Locator.resolve(Locator.java:81)
[ERROR] at org.apache.maven.plugin.assembly.io.DefaultAssemblyReader.addAssemblyFromDescriptor(DefaultAssemblyReader.java:309)
[ERROR] at org.apache.maven.plugin.assembly.io.DefaultAssemblyReader.readAssemblies(DefaultAssemblyReader.java:140)
[ERROR] at org.apache.maven.plugin.assembly.mojos.AbstractAssemblyMojo.execute(AbstractAssemblyMojo.java:328)
[ERROR] at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo(DefaultBuildPluginManager.java:101)
[ERROR] at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:209)
[ERROR] at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:153)
[ERROR] at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:145)
[ERROR] at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject(LifecycleModuleBuilder.java:84)
[ERROR] at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject(LifecycleModuleBuilder.java:59)
[ERROR] at org.apache.maven.lifecycle.internal.LifecycleStarter.singleThreadedBuild(LifecycleStarter.java:183)
[ERROR] at org.apache.maven.lifecycle.internal.LifecycleStarter.execute(LifecycleStarter.java:161)
[ERROR] at org.apache.maven.DefaultMaven.doExecute(DefaultMaven.java:319)
[ERROR] at org.apache.maven.DefaultMaven.execute(DefaultMaven.java:156)
[ERROR] at org.apache.maven.cli.MavenCli.execute(MavenCli.java:537)
[ERROR] at org.apache.maven.cli.MavenCli.doMain(MavenCli.java:196)
[ERROR] at org.apache.maven.cli.MavenCli.main(MavenCli.java:141)
[ERROR] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[ERROR] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
[ERROR] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
[ERROR] at java.lang.reflect.Method.invoke(Method.java:597)
[ERROR] at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced(Launcher.java:290)
[ERROR] at org.codehaus.plexus.classworlds.launcher.Launcher.launch(Launcher.java:230)
[ERROR] at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode(Launcher.java:409)
[ERROR] at org.codehaus.plexus.classworlds.launcher.Launcher.main(Launcher.java:352)
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoExecutionException
+ /home/jenkins/tools/maven/latest/bin/mvn test -Dmaven.test.failure.ignore=true -Pclover -DcloverLicenseLocation=/home/jenkins/tools/clover/latest/lib/clover.license
Build step 'Execute shell' marked build as failure
Archiving artifacts
Publishing Clover coverage report...
Clover xml file does not exist in: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/clover called: clover.xml and will not be copied to: /home/hudson/hudson/jobs/Hadoop-Hdfs-0.23-Build/builds/2011-10-31_11-31-21/clover.xml
Could not find 'trunk/hadoop-hdfs-project/hadoop-hdfs/target/clover/clover.xml'.  Did you generate the XML report for Clover?
Recording test results
Publishing Javadoc
Recording fingerprints
Updating MAPREDUCE-3313
Updating MAPREDUCE-2766
Updating MAPREDUCE-3262
Updating MAPREDUCE-3274
Updating MAPREDUCE-3171
Updating MAPREDUCE-3146
Updating MAPREDUCE-2747
Updating MAPREDUCE-3240
Updating MAPREDUCE-2696
Sending e-mails to: hdfs-dev@hadoop.apache.org
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
28 tests failed.
FAILED:  org.apache.hadoop.hdfs.TestClientReportBadBlock.testOneBlockReplica

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:640)
	at org.apache.hadoop.hdfs.LeaseRenewer.put(LeaseRenewer.java:313)
	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:759)
	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:714)
	at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:252)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:725)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:706)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:634)
	at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:188)
	at org.apache.hadoop.hdfs.TestClientReportBadBlock.createAFileWithCorruptedBlockReplicas(TestClientReportBadBlock.java:206)
	at org.apache.hadoop.hdfs.TestClientReportBadBlock.__CLR3_0_2lwx38a11m6(TestClientReportBadBlock.java:105)
	at org.apache.hadoop.hdfs.TestClientReportBadBlock.testOneBlockReplica(TestClientReportBadBlock.java:98)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
	at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:28)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:31)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:76)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)

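"unable to create new native thread" is the OutOfMemoryError the JVM raises when the OS refuses to start another thread, which on a shared build slave usually points at the per-user process/thread limit or native stack space being exhausted, not Java heap. The same error recurs in the failures below, and the cluster-teardown failures that follow are consistent with tests dying mid-setup. A hedged diagnostic sketch for the slave (assuming the builds run as the jenkins user):

    # sketch: on Linux, threads count against the per-user process cap
    ulimit -u                   # max user processes for the current user
    ps -u jenkins -L | wc -l    # rough count of live threads owned by jenkins
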

FAILED:  org.apache.hadoop.hdfs.TestClientReportBadBlock.testCorruptAllOfThreeReplicas

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:640)
	at org.apache.hadoop.hdfs.LeaseRenewer.put(LeaseRenewer.java:313)
	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:759)
	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:714)
	at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:252)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:725)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:706)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:634)
	at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:188)
	at org.apache.hadoop.hdfs.TestClientReportBadBlock.createAFileWithCorruptedBlockReplicas(TestClientReportBadBlock.java:206)
	at org.apache.hadoop.hdfs.TestClientReportBadBlock.__CLR3_0_2ufnut911mp(TestClientReportBadBlock.java:137)
	at org.apache.hadoop.hdfs.TestClientReportBadBlock.testCorruptAllOfThreeReplicas(TestClientReportBadBlock.java:129)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
	at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:28)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:31)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:76)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)


FAILED:  org.apache.hadoop.hdfs.TestClientReportBadBlock.testCorruptTwoOutOfThreeReplicas

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:640)
	at org.apache.hadoop.hdfs.LeaseRenewer.put(LeaseRenewer.java:313)
	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:759)
	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:714)
	at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:252)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:725)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:706)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:634)
	at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:188)
	at org.apache.hadoop.hdfs.TestClientReportBadBlock.createAFileWithCorruptedBlockReplicas(TestClientReportBadBlock.java:206)
	at org.apache.hadoop.hdfs.TestClientReportBadBlock.__CLR3_0_2xrn2b011n8(TestClientReportBadBlock.java:168)
	at org.apache.hadoop.hdfs.TestClientReportBadBlock.testCorruptTwoOutOfThreeReplicas(TestClientReportBadBlock.java:161)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
	at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:28)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:31)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:76)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)


REGRESSION:  org.apache.hadoop.hdfs.TestQuota.testSpaceCommands

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:640)
	at org.apache.hadoop.hdfs.LeaseRenewer.put(LeaseRenewer.java:313)
	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:759)
	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:714)
	at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:252)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:725)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:706)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:634)
	at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:188)
	at org.apache.hadoop.hdfs.TestQuota.__CLR3_0_2x9n8qr18i5(TestQuota.java:574)
	at org.apache.hadoop.hdfs.TestQuota.testSpaceCommands(TestQuota.java:529)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:76)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)


FAILED:  org.apache.hadoop.hdfs.TestCrcCorruption.testCrcCorruption

Error Message:
Cannot remove data directory: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/data

Stack Trace:
java.io.IOException: Cannot remove data directory: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/data
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:562)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.TestCrcCorruption.thistest(TestCrcCorruption.java:75)
	at org.apache.hadoop.hdfs.TestCrcCorruption.__CLR3_0_269rbwc11ut(TestCrcCorruption.java:210)
	at org.apache.hadoop.hdfs.TestCrcCorruption.testCrcCorruption(TestCrcCorruption.java:202)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:76)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)


FAILED:  org.apache.hadoop.hdfs.TestCrcCorruption.testEntirelyCorruptFileOneNode

Error Message:
Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name1. The directory is already locked.

Stack Trace:
java.io.IOException: Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name1. The directory is already locked.
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.lock(Storage.java:586)
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.analyzeStorage(Storage.java:435)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverStorageDirs(FSImage.java:294)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverTransitionRead(FSImage.java:210)
	at org.apache.hadoop.hdfs.server.namenode.FSDirectory.loadFSImage(FSDirectory.java:175)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.initialize(FSNamesystem.java:329)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.<init>(FSNamesystem.java:301)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.loadNamesystem(NameNode.java:298)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:332)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:458)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:450)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:751)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:641)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:545)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.TestCrcCorruption.doTestEntirelyCorruptFile(TestCrcCorruption.java:253)
	at org.apache.hadoop.hdfs.TestCrcCorruption.__CLR3_0_2c0xia211v5(TestCrcCorruption.java:231)
	at org.apache.hadoop.hdfs.TestCrcCorruption.testEntirelyCorruptFileOneNode(TestCrcCorruption.java:230)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
	at org.junit.internal.runners.statements.FailOnTimeout$1.run(FailOnTimeout.java:28)
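
"Cannot lock storage ... The directory is already locked" is raised by Storage$StorageDirectory.lock() (Storage.java:586 in the trace), which takes an exclusive file lock on the storage directory and fails when a NameNode leaked by a previous test still holds it. A minimal sketch of that locking pattern, assuming the conventional in_use.lock file name (an illustration of the mechanism, not the Storage class itself):

    import java.io.File;
    import java.io.IOException;
    import java.io.RandomAccessFile;
    import java.nio.channels.FileLock;

    public class StorageLockSketch {
      public static void main(String[] args) throws IOException {
        // "in_use.lock" is the conventional HDFS lock file name; treat it
        // as an assumption of this sketch.
        RandomAccessFile raf =
            new RandomAccessFile(new File(args[0], "in_use.lock"), "rws");
        FileLock lock = raf.getChannel().tryLock();
        if (lock == null) {
          raf.close();
          // Mirrors the message seen in the failures above
          throw new IOException("Cannot lock storage " + args[0]
              + ". The directory is already locked.");
        }
        // A daemon would hold the lock until shutdown releases it.
      }
    }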


FAILED:  org.apache.hadoop.hdfs.TestCrcCorruption.testEntirelyCorruptFileThreeNodes

Error Message:
Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name1. The directory is already locked.

Stack Trace:
java.io.IOException: Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name1. The directory is already locked.
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.lock(Storage.java:586)
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.analyzeStorage(Storage.java:435)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverStorageDirs(FSImage.java:294)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverTransitionRead(FSImage.java:210)
	at org.apache.hadoop.hdfs.server.namenode.FSDirectory.loadFSImage(FSDirectory.java:175)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.initialize(FSNamesystem.java:329)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.<init>(FSNamesystem.java:301)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.loadNamesystem(NameNode.java:298)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:332)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:458)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:450)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:751)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:641)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:545)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.TestCrcCorruption.doTestEntirelyCorruptFile(TestCrcCorruption.java:253)
	at org.apache.hadoop.hdfs.TestCrcCorruption.__CLR3_0_26etsr711v7(TestCrcCorruption.java:244)
	at org.apache.hadoop.hdfs.TestCrcCorruption.testEntirelyCorruptFileThreeNodes(TestCrcCorruption.java:243)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
	at org.junit.internal.runners.statements.FailOnTimeout$1.run(FailOnTimeout.java:28)


FAILED:  org.apache.hadoop.hdfs.TestFileAppend3$1.org.apache.hadoop.hdfs.TestFileAppend3

Error Message:
Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name1. The directory is already locked.

Stack Trace:
java.io.IOException: Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name1. The directory is already locked.
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.lock(Storage.java:586)
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.analyzeStorage(Storage.java:435)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverStorageDirs(FSImage.java:294)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverTransitionRead(FSImage.java:210)
	at org.apache.hadoop.hdfs.server.namenode.FSDirectory.loadFSImage(FSDirectory.java:175)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.initialize(FSNamesystem.java:329)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.<init>(FSNamesystem.java:301)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.loadNamesystem(NameNode.java:298)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:332)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:458)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:450)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:751)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:641)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:545)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.TestFileAppend3$1.setUp(TestFileAppend3.java:73)
	at junit.extensions.TestSetup$1.protect(TestSetup.java:22)
	at junit.framework.TestResult.runProtected(TestResult.java:128)
	at junit.extensions.TestSetup.run(TestSetup.java:27)
	at org.junit.internal.runners.JUnit38ClassRunner.run(JUnit38ClassRunner.java:83)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)


FAILED:  org.apache.hadoop.hdfs.TestLeaseRecovery.testBlockSynchronization

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:640)
	at org.apache.hadoop.ipc.Server.start(Server.java:1697)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.runDatanodeDaemon(DataNode.java:2142)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:915)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:786)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:567)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.TestLeaseRecovery.__CLR3_0_228hms417cm(TestLeaseRecovery.java:76)
	at org.apache.hadoop.hdfs.TestLeaseRecovery.testBlockSynchronization(TestLeaseRecovery.java:68)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at junit.framework.TestCase.runTest(TestCase.java:168)
	at junit.framework.TestCase.runBare(TestCase.java:134)
	at junit.framework.TestResult$1.protect(TestResult.java:110)
	at junit.framework.TestResult.runProtected(TestResult.java:128)
	at junit.framework.TestResult.run(TestResult.java:113)
	at junit.framework.TestCase.run(TestCase.java:124)
	at junit.framework.TestSuite.runTest(TestSuite.java:232)
	at junit.framework.TestSuite.run(TestSuite.java:227)
	at org.junit.internal.runners.JUnit38ClassRunner.run(JUnit38ClassRunner.java:83)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)
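
Despite the OutOfMemoryError type, "unable to create new native thread" is not heap exhaustion: the JVM asked the OS for another native thread and was refused, typically because the slave hit its per-user process/thread limit or ran out of address space for thread stacks. A minimal sketch that reproduces this error class on any JVM (do not run it on a shared machine):

    public class NativeThreadExhaustionSketch {
      public static void main(String[] args) {
        while (true) {
          // Each thread parks so it stays alive; eventually Thread.start()
          // throws java.lang.OutOfMemoryError: unable to create new native thread
          new Thread(new Runnable() {
            public void run() {
              try {
                Thread.sleep(Long.MAX_VALUE);
              } catch (InterruptedException ignored) {
              }
            }
          }).start();
        }
      }
    }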


FAILED:  org.apache.hadoop.hdfs.TestParallelRead.org.apache.hadoop.hdfs.TestParallelRead

Error Message:
Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name2. The directory is already locked.

Stack Trace:
java.io.IOException: Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name2. The directory is already locked.
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.lock(Storage.java:586)
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.analyzeStorage(Storage.java:435)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverStorageDirs(FSImage.java:294)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverTransitionRead(FSImage.java:210)
	at org.apache.hadoop.hdfs.server.namenode.FSDirectory.loadFSImage(FSDirectory.java:175)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.initialize(FSNamesystem.java:329)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.<init>(FSNamesystem.java:301)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.loadNamesystem(NameNode.java:298)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:332)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:458)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:450)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:751)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:641)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:545)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.BlockReaderTestUtil.<init>(BlockReaderTestUtil.java:53)
	at org.apache.hadoop.hdfs.TestParallelRead.setupCluster(TestParallelRead.java:63)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:27)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:31)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)


FAILED:  org.apache.hadoop.hdfs.TestParallelRead.org.apache.hadoop.hdfs.TestParallelRead

Error Message:
null

Stack Trace:
java.lang.NullPointerException
	at org.apache.hadoop.hdfs.TestParallelRead.teardownCluster(TestParallelRead.java:280)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)
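
This second TestParallelRead entry is a follow-on failure, not an independent bug: setupCluster() threw above, so the static cluster state was never assigned, and the unguarded teardown at TestParallelRead.java:280 dereferenced null. A minimal sketch of a teardown that tolerates a failed setup (field and method names here are illustrative):

    import org.apache.hadoop.hdfs.MiniDFSCluster;
    import org.junit.AfterClass;

    public class GuardedTeardownSketch {
      private static MiniDFSCluster cluster; // stays null if setup failed

      @AfterClass
      public static void teardownCluster() {
        // Guard so a setup failure is not masked by a secondary NPE
        if (cluster != null) {
          cluster.shutdown();
        }
      }
    }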


FAILED:  org.apache.hadoop.hdfs.TestQuota.testBlockAllocationAdjustsUsageConservatively

Error Message:
Error while running command to get file permissions : java.io.IOException: Cannot run program "/bin/ls": java.io.IOException: error=11, Resource temporarily unavailable
	at java.lang.ProcessBuilder.start(ProcessBuilder.java:460)
	at org.apache.hadoop.util.Shell.runCommand(Shell.java:206)
	at org.apache.hadoop.util.Shell.run(Shell.java:188)
	at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:381)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:467)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:450)
	at org.apache.hadoop.fs.RawLocalFileSystem.execCommand(RawLocalFileSystem.java:556)
	at org.apache.hadoop.fs.RawLocalFileSystem.access$100(RawLocalFileSystem.java:50)
	at org.apache.hadoop.fs.RawLocalFileSystem$RawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:477)
	at org.apache.hadoop.fs.RawLocalFileSystem$RawLocalFileStatus.getPermission(RawLocalFileSystem.java:452)
	at org.apache.hadoop.util.DiskChecker.mkdirsWithExistsAndPermissionCheck(DiskChecker.java:131)
	at org.apache.hadoop.util.DiskChecker.checkDir(DiskChecker.java:148)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.getDataDirsFromURIs(DataNode.java:2280)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.makeInstance(DataNode.java:2259)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:2196)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:2163)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:901)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:786)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:567)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.TestQuota.__CLR3_0_2k0z6jd18lw(TestQuota.java:789)
	at org.apache.hadoop.hdfs.TestQuota.testBlockAllocationAdjustsUsageConservatively(TestQuota.java:784)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:76)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)
Caused by: java.io.IOException: java.io.IOException: error=11, Resource temporarily unavailable
	at java.lang.UNIXProcess.<init>(UNIXProcess.java:148)
	at java.lang.ProcessImpl.start(ProcessImpl.java:65)
	at java.lang.ProcessBuilder.start(ProcessBuilder.java:453)
	... 51 more

Stack Trace:
java.lang.RuntimeException: Error while running command to get file permissions : java.io.IOException: Cannot run program "/bin/ls": java.io.IOException: error=11, Resource temporarily unavailable
	at java.lang.ProcessBuilder.start(ProcessBuilder.java:460)
	at org.apache.hadoop.util.Shell.runCommand(Shell.java:206)
	at org.apache.hadoop.util.Shell.run(Shell.java:188)
	at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:381)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:467)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:450)
	at org.apache.hadoop.fs.RawLocalFileSystem.execCommand(RawLocalFileSystem.java:556)
	at org.apache.hadoop.fs.RawLocalFileSystem.access$100(RawLocalFileSystem.java:50)
	at org.apache.hadoop.fs.RawLocalFileSystem$RawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:477)
	at org.apache.hadoop.fs.RawLocalFileSystem$RawLocalFileStatus.getPermission(RawLocalFileSystem.java:452)
	at org.apache.hadoop.util.DiskChecker.mkdirsWithExistsAndPermissionCheck(DiskChecker.java:131)
	at org.apache.hadoop.util.DiskChecker.checkDir(DiskChecker.java:148)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.getDataDirsFromURIs(DataNode.java:2280)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.makeInstance(DataNode.java:2259)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:2196)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:2163)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:901)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:786)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:567)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.TestQuota.__CLR3_0_2k0z6jd18lw(TestQuota.java:789)
	at org.apache.hadoop.hdfs.TestQuota.testBlockAllocationAdjustsUsageConservatively(TestQuota.java:784)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:76)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)
Caused by: java.io.IOException: java.io.IOException: error=11, Resource temporarily unavailable
	at java.lang.UNIXProcess.<init>(UNIXProcess.java:148)
	at java.lang.ProcessImpl.start(ProcessImpl.java:65)
	at java.lang.ProcessBuilder.start(ProcessBuilder.java:453)
	... 51 more

	at org.apache.hadoop.fs.RawLocalFileSystem$RawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:502)
	at org.apache.hadoop.fs.RawLocalFileSystem$RawLocalFileStatus.getPermission(RawLocalFileSystem.java:452)
	at org.apache.hadoop.util.DiskChecker.mkdirsWithExistsAndPermissionCheck(DiskChecker.java:131)
	at org.apache.hadoop.util.DiskChecker.checkDir(DiskChecker.java:148)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.getDataDirsFromURIs(DataNode.java:2280)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.makeInstance(DataNode.java:2259)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:2196)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:2163)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:901)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:786)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:567)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.TestQuota.__CLR3_0_2k0z6jd18lw(TestQuota.java:789)
	at org.apache.hadoop.hdfs.TestQuota.testBlockAllocationAdjustsUsageConservatively(TestQuota.java:784)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:76)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)
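
The root cause here is the same resource exhaustion that surfaces elsewhere as "unable to create new native thread": Hadoop shells out to /bin/ls to read file permissions, and the fork failed with errno 11 (EAGAIN, "Resource temporarily unavailable") because the slave could not spawn another process. A minimal sketch of the call that fails, following the ProcessBuilder.start path shown in the trace (the ls arguments are illustrative):

    import java.io.IOException;

    public class ForkSketch {
      public static void main(String[] args) throws IOException {
        // Under process exhaustion this throws:
        //   java.io.IOException: Cannot run program "/bin/ls":
        //   java.io.IOException: error=11, Resource temporarily unavailable
        new ProcessBuilder("/bin/ls", "-ld", "/tmp").start();
      }
    }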


FAILED:  org.apache.hadoop.hdfs.TestQuota.testMultipleFilesSmallerThanOneBlock

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:640)
	at org.apache.hadoop.util.Shell.runCommand(Shell.java:240)
	at org.apache.hadoop.util.Shell.run(Shell.java:188)
	at org.apache.hadoop.fs.DF.getFilesystem(DF.java:102)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeResourceChecker.addDirsToCheck(NameNodeResourceChecker.java:91)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeResourceChecker.<init>(NameNodeResourceChecker.java:71)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.initialize(FSNamesystem.java:317)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.<init>(FSNamesystem.java:301)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.loadNamesystem(NameNode.java:298)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:332)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:458)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:450)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:751)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:641)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:545)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.TestQuota.__CLR3_0_2n8cxri18mt(TestQuota.java:851)
	at org.apache.hadoop.hdfs.TestQuota.testMultipleFilesSmallerThanOneBlock(TestQuota.java:846)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:76)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)


FAILED:  org.apache.hadoop.hdfs.TestReplaceDatanodeOnFailure.testReplaceDatanodeOnFailure

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:640)
	at org.apache.hadoop.hdfs.DFSOutputStream.<init>(DFSOutputStream.java:1260)
	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:756)
	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:714)
	at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:252)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:725)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:706)
	at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:634)
	at org.apache.hadoop.hdfs.TestReplaceDatanodeOnFailure$SlowWriter.<init>(TestReplaceDatanodeOnFailure.java:197)
	at org.apache.hadoop.hdfs.TestReplaceDatanodeOnFailure.__CLR3_0_2qv2btw18w5(TestReplaceDatanodeOnFailure.java:130)
	at org.apache.hadoop.hdfs.TestReplaceDatanodeOnFailure.testReplaceDatanodeOnFailure(TestReplaceDatanodeOnFailure.java:112)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:76)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)


FAILED:  org.apache.hadoop.hdfs.TestSetrepDecreasing.testSetrepDecreasing

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:640)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.runDatanodeDaemon(DataNode.java:2141)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:915)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:786)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:567)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.TestSetrepIncreasing.setrep(TestSetrepIncreasing.java:36)
	at org.apache.hadoop.hdfs.TestSetrepDecreasing.__CLR3_0_2nu11db19g0(TestSetrepDecreasing.java:26)
	at org.apache.hadoop.hdfs.TestSetrepDecreasing.testSetrepDecreasing(TestSetrepDecreasing.java:25)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at junit.framework.TestCase.runTest(TestCase.java:168)
	at junit.framework.TestCase.runBare(TestCase.java:134)
	at junit.framework.TestResult$1.protect(TestResult.java:110)
	at junit.framework.TestResult.runProtected(TestResult.java:128)
	at junit.framework.TestResult.run(TestResult.java:113)
	at junit.framework.TestCase.run(TestCase.java:124)
	at junit.framework.TestSuite.runTest(TestSuite.java:232)
	at junit.framework.TestSuite.run(TestSuite.java:227)
	at org.junit.internal.runners.JUnit38ClassRunner.run(JUnit38ClassRunner.java:83)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)


FAILED:  org.apache.hadoop.hdfs.TestSetrepIncreasing.testSetrepIncreasing

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:640)
	at org.apache.hadoop.hdfs.server.datanode.DataNode$BPOfferService.start(DataNode.java:1057)
	at org.apache.hadoop.hdfs.server.datanode.DataNode$BlockPoolManager$1.run(DataNode.java:330)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1152)
	at org.apache.hadoop.hdfs.server.datanode.DataNode$BlockPoolManager.startAll(DataNode.java:326)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.runDatanodeDaemon(DataNode.java:2138)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:915)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:786)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:567)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.TestSetrepIncreasing.setrep(TestSetrepIncreasing.java:36)
	at org.apache.hadoop.hdfs.TestSetrepIncreasing.__CLR3_0_2sz96c319gw(TestSetrepIncreasing.java:72)
	at org.apache.hadoop.hdfs.TestSetrepIncreasing.testSetrepIncreasing(TestSetrepIncreasing.java:71)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at junit.framework.TestCase.runTest(TestCase.java:168)
	at junit.framework.TestCase.runBare(TestCase.java:134)
	at junit.framework.TestResult$1.protect(TestResult.java:110)
	at junit.framework.TestResult.runProtected(TestResult.java:128)
	at junit.framework.TestResult.run(TestResult.java:113)
	at junit.framework.TestCase.run(TestCase.java:124)
	at junit.framework.TestSuite.runTest(TestSuite.java:232)
	at junit.framework.TestSuite.run(TestSuite.java:227)
	at org.junit.internal.runners.JUnit38ClassRunner.run(JUnit38ClassRunner.java:83)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)


FAILED:  org.apache.hadoop.hdfs.TestSetrepIncreasing.testSetrepIncreasingSimulatedStorage

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:640)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:837)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:170)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:606)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:149)
	at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:113)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:630)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:545)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.TestSetrepIncreasing.setrep(TestSetrepIncreasing.java:36)
	at org.apache.hadoop.hdfs.TestSetrepIncreasing.__CLR3_0_23efmtu19gy(TestSetrepIncreasing.java:75)
	at org.apache.hadoop.hdfs.TestSetrepIncreasing.testSetrepIncreasingSimulatedStorage(TestSetrepIncreasing.java:74)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at junit.framework.TestCase.runTest(TestCase.java:168)
	at junit.framework.TestCase.runBare(TestCase.java:134)
	at junit.framework.TestResult$1.protect(TestResult.java:110)
	at junit.framework.TestResult.runProtected(TestResult.java:128)
	at junit.framework.TestResult.run(TestResult.java:113)
	at junit.framework.TestCase.run(TestCase.java:124)
	at junit.framework.TestSuite.runTest(TestSuite.java:232)
	at junit.framework.TestSuite.run(TestSuite.java:227)
	at org.junit.internal.runners.JUnit38ClassRunner.run(JUnit38ClassRunner.java:83)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)


FAILED:  org.apache.hadoop.hdfs.security.TestDelegationToken.testDelegationTokenDFSApi

Error Message:
Cannot remove data directory: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/data

Stack Trace:
java.io.IOException: Cannot remove data directory: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/data
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:562)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.security.TestDelegationToken.setUp(TestDelegationToken.java:69)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:27)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:31)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:76)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)


FAILED:  org.apache.hadoop.hdfs.security.TestDelegationToken.testDelegationTokenWebHdfsApi

Error Message:
Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name1. The directory is already locked.

Stack Trace:
java.io.IOException: Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name1. The directory is already locked.
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.lock(Storage.java:586)
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.analyzeStorage(Storage.java:435)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverStorageDirs(FSImage.java:294)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverTransitionRead(FSImage.java:210)
	at org.apache.hadoop.hdfs.server.namenode.FSDirectory.loadFSImage(FSDirectory.java:175)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.initialize(FSNamesystem.java:329)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.<init>(FSNamesystem.java:301)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.loadNamesystem(NameNode.java:298)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:332)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:458)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:450)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:751)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:641)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:545)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.security.TestDelegationToken.setUp(TestDelegationToken.java:69)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:27)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:31)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:76)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)


FAILED:  org.apache.hadoop.hdfs.security.TestDelegationToken.testDelegationTokenWithDoAs

Error Message:
Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name1. The directory is already locked.

Stack Trace:
java.io.IOException: Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name1. The directory is already locked.
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.lock(Storage.java:586)
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.analyzeStorage(Storage.java:435)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverStorageDirs(FSImage.java:294)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverTransitionRead(FSImage.java:210)
	at org.apache.hadoop.hdfs.server.namenode.FSDirectory.loadFSImage(FSDirectory.java:175)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.initialize(FSNamesystem.java:329)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.<init>(FSNamesystem.java:301)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.loadNamesystem(NameNode.java:298)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:332)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:458)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:450)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:751)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:641)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:545)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.security.TestDelegationToken.setUp(TestDelegationToken.java:69)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:27)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:31)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:76)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)
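
The "Cannot lock storage ... already locked" failures in this report (the one above and the balancer failures that follow) all pass through Storage$StorageDirectory.lock, which takes a java.nio file lock on an in_use.lock file inside each name directory. A minimal, self-contained sketch of that locking behavior follows; it is illustrative only (the file name and handling are paraphrased, not copied from the 0.23 Storage code):

    import java.io.File;
    import java.io.IOException;
    import java.io.RandomAccessFile;
    import java.nio.channels.FileLock;
    import java.nio.channels.OverlappingFileLockException;

    public class InUseLockDemo {
        public static void main(String[] args) throws IOException {
            // Illustrative stand-in for <name dir>/in_use.lock
            File lockFile = new File("in_use.lock");
            lockFile.createNewFile();

            RandomAccessFile first = new RandomAccessFile(lockFile, "rws");
            FileLock held = first.getChannel().tryLock();      // first locker wins
            System.out.println("first tryLock ok: " + (held != null));

            RandomAccessFile second = new RandomAccessFile(lockFile, "rws");
            try {
                // A second attempt fails: tryLock returns null when another
                // process holds the lock, and throws from within the same JVM.
                FileLock again = second.getChannel().tryLock();
                System.out.println("second tryLock ok: " + (again != null));
            } catch (OverlappingFileLockException e) {
                System.out.println("second tryLock: directory is already locked");
            } finally {
                second.close();
                held.release();
                first.close();
            }
        }
    }

Once a NameNode in the forked test JVM acquires the lock and is never shut down, every later test that reuses the same name1/name2 directories fails in exactly this way.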


FAILED:  org.apache.hadoop.hdfs.server.balancer.TestBalancer.testBalancer0

Error Message:
Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name2. The directory is already locked.

Stack Trace:
java.io.IOException: Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name2. The directory is already locked.
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.lock(Storage.java:586)
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.analyzeStorage(Storage.java:435)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverStorageDirs(FSImage.java:294)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverTransitionRead(FSImage.java:210)
	at org.apache.hadoop.hdfs.server.namenode.FSDirectory.loadFSImage(FSDirectory.java:175)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.initialize(FSNamesystem.java:329)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.<init>(FSNamesystem.java:301)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.loadNamesystem(NameNode.java:298)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:332)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:458)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:450)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:751)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:641)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:545)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancer.doTest(TestBalancer.java:301)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancer.oneNodeTest(TestBalancer.java:346)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancer.__CLR3_0_29j3j5b1ae0(TestBalancer.java:366)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancer.testBalancer0(TestBalancer.java:363)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at junit.framework.TestCase.runTest(TestCase.java:168)
	at junit.framework.TestCase.runBare(TestCase.java:134)
	at junit.framework.TestResult$1.protect(TestResult.java:110)
	at junit.framework.TestResult.runProtected(TestResult.java:128)
	at junit.framework.TestResult.run(TestResult.java:113)
	at junit.framework.TestCase.run(TestCase.java:124)
	at junit.framework.TestSuite.runTest(TestSuite.java:232)
	at junit.framework.TestSuite.run(TestSuite.java:227)
	at org.junit.internal.runners.JUnit38ClassRunner.run(JUnit38ClassRunner.java:83)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)


FAILED:  org.apache.hadoop.hdfs.server.balancer.TestBalancer.testBalancer1

Error Message:
Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name2. The directory is already locked.

Stack Trace:
java.io.IOException: Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name2. The directory is already locked.
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.lock(Storage.java:586)
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.analyzeStorage(Storage.java:435)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverStorageDirs(FSImage.java:294)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverTransitionRead(FSImage.java:210)
	at org.apache.hadoop.hdfs.server.namenode.FSDirectory.loadFSImage(FSDirectory.java:175)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.initialize(FSNamesystem.java:329)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.<init>(FSNamesystem.java:301)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.loadNamesystem(NameNode.java:298)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:332)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:458)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:450)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:751)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:641)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:545)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancer.generateBlocks(TestBalancer.java:99)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancer.testUnevenDistribution(TestBalancer.java:181)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancer.__CLR3_0_2cs3hxs1ae5(TestBalancer.java:374)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancer.testBalancer1(TestBalancer.java:371)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at junit.framework.TestCase.runTest(TestCase.java:168)
	at junit.framework.TestCase.runBare(TestCase.java:134)
	at junit.framework.TestResult$1.protect(TestResult.java:110)
	at junit.framework.TestResult.runProtected(TestResult.java:128)
	at junit.framework.TestResult.run(TestResult.java:113)
	at junit.framework.TestCase.run(TestCase.java:124)
	at junit.framework.TestSuite.runTest(TestSuite.java:232)
	at junit.framework.TestSuite.run(TestSuite.java:227)
	at org.junit.internal.runners.JUnit38ClassRunner.run(JUnit38ClassRunner.java:83)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)


FAILED:  org.apache.hadoop.hdfs.server.balancer.TestBalancer.testBalancer2

Error Message:
Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name2. The directory is already locked.

Stack Trace:
java.io.IOException: Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name2. The directory is already locked.
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.lock(Storage.java:586)
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.analyzeStorage(Storage.java:435)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverStorageDirs(FSImage.java:294)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverTransitionRead(FSImage.java:210)
	at org.apache.hadoop.hdfs.server.namenode.FSDirectory.loadFSImage(FSDirectory.java:175)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.initialize(FSNamesystem.java:329)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.<init>(FSNamesystem.java:301)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.loadNamesystem(NameNode.java:298)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:332)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:458)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:450)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:751)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:641)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:545)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancer.testBalancerDefaultConstructor(TestBalancer.java:392)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancer.__CLR3_0_2g13gq91ae9(TestBalancer.java:383)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancer.testBalancer2(TestBalancer.java:380)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at junit.framework.TestCase.runTest(TestCase.java:168)
	at junit.framework.TestCase.runBare(TestCase.java:134)
	at junit.framework.TestResult$1.protect(TestResult.java:110)
	at junit.framework.TestResult.runProtected(TestResult.java:128)
	at junit.framework.TestResult.run(TestResult.java:113)
	at junit.framework.TestCase.run(TestCase.java:124)
	at junit.framework.TestSuite.runTest(TestSuite.java:232)
	at junit.framework.TestSuite.run(TestSuite.java:227)
	at org.junit.internal.runners.JUnit38ClassRunner.run(JUnit38ClassRunner.java:83)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)


FAILED:  org.apache.hadoop.hdfs.server.balancer.TestBalancerWithMultipleNameNodes.testBalancer

Error Message:
Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name1. The directory is already locked.

Stack Trace:
java.io.IOException: Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name1. The directory is already locked.
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.lock(Storage.java:586)
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.analyzeStorage(Storage.java:435)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverStorageDirs(FSImage.java:294)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverTransitionRead(FSImage.java:210)
	at org.apache.hadoop.hdfs.server.namenode.FSDirectory.loadFSImage(FSDirectory.java:175)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.initialize(FSNamesystem.java:329)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.<init>(FSNamesystem.java:301)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.loadNamesystem(NameNode.java:298)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:332)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:458)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:450)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:751)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:641)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createFederatedNameNode(MiniDFSCluster.java:649)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createFederationNamenodes(MiniDFSCluster.java:610)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:556)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancerWithMultipleNameNodes.runTest(TestBalancerWithMultipleNameNodes.java:330)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancerWithMultipleNameNodes.__CLR3_0_27qjdv51ak2(TestBalancerWithMultipleNameNodes.java:375)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancerWithMultipleNameNodes.testBalancer(TestBalancerWithMultipleNameNodes.java:373)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:76)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)


FAILED:  org.apache.hadoop.hdfs.server.balancer.TestBalancerWithMultipleNameNodes.testUnevenDistribution

Error Message:
Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name1. The directory is already locked.

Stack Trace:
java.io.IOException: Cannot lock storage /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/name1. The directory is already locked.
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.lock(Storage.java:586)
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.analyzeStorage(Storage.java:435)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverStorageDirs(FSImage.java:294)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverTransitionRead(FSImage.java:210)
	at org.apache.hadoop.hdfs.server.namenode.FSDirectory.loadFSImage(FSDirectory.java:175)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.initialize(FSNamesystem.java:329)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.<init>(FSNamesystem.java:301)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.loadNamesystem(NameNode.java:298)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:332)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:458)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:450)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:751)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:641)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createFederatedNameNode(MiniDFSCluster.java:649)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createFederationNamenodes(MiniDFSCluster.java:610)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:556)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancerWithMultipleNameNodes.unevenDistribution(TestBalancerWithMultipleNameNodes.java:255)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancerWithMultipleNameNodes.__CLR3_0_25c33v41ak5(TestBalancerWithMultipleNameNodes.java:383)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancerWithMultipleNameNodes.testUnevenDistribution(TestBalancerWithMultipleNameNodes.java:381)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:76)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)
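
Both balancer suites enter through MiniDFSCluster$Builder.build (see the traces above). A rough sketch of that builder usage, assuming the 0.23-era API visible in these traces; the relevant point is that every cluster in a forked test JVM defaults to the same base directory (target/test/data/dfs), so a cluster that is never shut down leaves the shared name directories locked for whatever runs next:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.MiniDFSCluster;

    public class MiniClusterSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
                .numDataNodes(2)       // two simulated DataNodes
                .build();              // formats and locks the default name dirs
            try {
                // ... exercise HDFS via cluster.getFileSystem() ...
            } finally {
                cluster.shutdown();    // releases the in_use.lock files
            }
        }
    }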


FAILED:  org.apache.hadoop.hdfs.tools.offlineImageViewer.TestOfflineImageViewer.testOIV

Error Message:
Cannot remove data directory: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/data

Stack Trace:
java.io.IOException: Cannot remove data directory: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/data
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:562)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.tools.offlineImageViewer.TestOfflineImageViewer.initFsimage(TestOfflineImageViewer.java:106)
	at org.apache.hadoop.hdfs.tools.offlineImageViewer.TestOfflineImageViewer.__CLR3_0_26f69yp1jul(TestOfflineImageViewer.java:82)
	at org.apache.hadoop.hdfs.tools.offlineImageViewer.TestOfflineImageViewer.testOIV(TestOfflineImageViewer.java:79)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at junit.framework.TestCase.runTest(TestCase.java:168)
	at junit.framework.TestCase.runBare(TestCase.java:134)
	at junit.framework.TestResult$1.protect(TestResult.java:110)
	at junit.framework.TestResult.runProtected(TestResult.java:128)
	at junit.framework.TestResult.run(TestResult.java:113)
	at junit.framework.TestCase.run(TestCase.java:124)
	at junit.framework.TestSuite.runTest(TestSuite.java:232)
	at junit.framework.TestSuite.run(TestSuite.java:227)
	at org.junit.internal.runners.JUnit38ClassRunner.run(JUnit38ClassRunner.java:83)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)
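
Unlike the lock failures, this one comes from MiniDFSCluster's pre-clean step (initMiniDFSCluster, in the trace above), which must delete the shared dfs/data tree before laying out fresh DataNode storage. Roughly the guard involved, paraphrased rather than quoted from the 0.23 source; a stale process keeping files open under that tree is enough to make the delete fail:

    import java.io.File;
    import java.io.IOException;
    import org.apache.hadoop.fs.FileUtil;

    public class DataDirPreClean {
        // Sketch of the pre-clean guard: refuse to start if the old
        // data tree cannot be removed in full.
        static void preClean(File dataDir) throws IOException {
            if (dataDir.exists() && !FileUtil.fullyDelete(dataDir)) {
                throw new IOException("Cannot remove data directory: " + dataDir);
            }
        }
    }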


FAILED:  org.apache.hadoop.cli.TestHDFSCLI.testAll

Error Message:
One of the tests failed. See the Detailed results to identify the command that failed

Stack Trace:
java.lang.AssertionError: One of the tests failed. See the Detailed results to identify the command that failed
	at org.junit.Assert.fail(Assert.java:91)
	at org.junit.Assert.assertTrue(Assert.java:43)
	at org.apache.hadoop.cli.CLITestHelper.displayResults(CLITestHelper.java:264)
	at org.apache.hadoop.cli.CLITestHelper.tearDown(CLITestHelper.java:126)
	at org.apache.hadoop.cli.TestHDFSCLI.tearDown(TestHDFSCLI.java:81)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:76)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)


FAILED:  org.apache.hadoop.hdfs.TestDfsOverAvroRpc.testWorkingDirectory

Error Message:
Two methods with same name: reportBadBlocks

Stack Trace:
org.apache.avro.AvroTypeException: Two methods with same name: reportBadBlocks
	at org.apache.avro.reflect.ReflectData.getProtocol(ReflectData.java:394)
	at org.apache.avro.ipc.reflect.ReflectResponder.<init>(ReflectResponder.java:36)
	at org.apache.hadoop.ipc.AvroRpcEngine.createResponder(AvroRpcEngine.java:189)
	at org.apache.hadoop.ipc.AvroRpcEngine$TunnelResponder.<init>(AvroRpcEngine.java:196)
	at org.apache.hadoop.ipc.AvroRpcEngine.getServer(AvroRpcEngine.java:232)
	at org.apache.hadoop.ipc.RPC.getServer(RPC.java:550)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.<init>(NameNodeRpcServer.java:145)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createRpcServer(NameNode.java:356)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:334)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:458)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:450)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:751)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:641)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:545)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.TestLocalDFS.__CLR3_0_2hl5jzp17s3(TestLocalDFS.java:64)
	at org.apache.hadoop.hdfs.TestLocalDFS.testWorkingDirectory(TestLocalDFS.java:62)
	at org.apache.hadoop.hdfs.TestDfsOverAvroRpc.__CLR3_0_2hl5jzp14yh(TestDfsOverAvroRpc.java:30)
	at org.apache.hadoop.hdfs.TestDfsOverAvroRpc.testWorkingDirectory(TestDfsOverAvroRpc.java:27)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at junit.framework.TestCase.runTest(TestCase.java:168)
	at junit.framework.TestCase.runBare(TestCase.java:134)
	at junit.framework.TestResult$1.protect(TestResult.java:110)
	at junit.framework.TestResult.runProtected(TestResult.java:128)
	at junit.framework.TestResult.run(TestResult.java:113)
	at junit.framework.TestCase.run(TestCase.java:124)
	at junit.framework.TestSuite.runTest(TestSuite.java:232)
	at junit.framework.TestSuite.run(TestSuite.java:227)
	at org.junit.internal.runners.JUnit38ClassRunner.run(JUnit38ClassRunner.java:83)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)
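
This failure is different in kind: the AvroTypeException is thrown while ReflectData builds an Avro protocol from the NameNode's RPC interfaces. Avro's reflect mapping has no notion of method overloading, so encountering two methods with one name (here reportBadBlocks, presumably visible through more than one of the protocols NameNodeRpcServer exposes) aborts protocol generation before the RPC server can start. A minimal sketch that reproduces the same exception against a hypothetical interface:

    import org.apache.avro.AvroTypeException;
    import org.apache.avro.reflect.ReflectData;

    public class OverloadDemo {
        // Hypothetical interface for illustration: one name, two signatures.
        public interface BadBlocksProtocol {
            void reportBadBlocks(String block);
            void reportBadBlocks(String[] blocks);
        }

        public static void main(String[] args) {
            try {
                ReflectData.get().getProtocol(BadBlocksProtocol.class);
            } catch (AvroTypeException e) {
                // Prints: Two methods with same name: reportBadBlocks
                System.out.println(e.getMessage());
            }
        }
    }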




Hadoop-Hdfs-0.23-Build - Build # 58 - Still Failing

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/58/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 7980 lines...]
[INFO]                  from hdfs.c:19:
[INFO] /usr/lib/gcc/x86_64-linux-gnu/4.4.3/include/stddef.h:211: error: duplicate 'unsigned'
[INFO] /usr/lib/gcc/x86_64-linux-gnu/4.4.3/include/stddef.h:211: error: two or more data types in declaration specifiers
[INFO] make: *** [hdfs.lo] Error 1
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS ................................ FAILURE [22.162s]
[INFO] Apache Hadoop HDFS Project ........................ SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 23.890s
[INFO] Finished at: Mon Oct 31 22:09:40 UTC 2011
[INFO] Final Memory: 27M/275M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.codehaus.mojo:make-maven-plugin:1.0-beta-1:make-install (compile) on project hadoop-hdfs: make returned an exit value != 0. Aborting build; see command output above for more information. -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoExecutionException
+ /home/jenkins/tools/maven/latest/bin/mvn test -Dmaven.test.failure.ignore=true -Pclover -DcloverLicenseLocation=/home/jenkins/tools/clover/latest/lib/clover.license
Archiving artifacts
Publishing Clover coverage report...
Publishing Clover HTML report...
Publishing Clover XML report...
Publishing Clover coverage results...
Recording test results
Build step 'Publish JUnit test result report' changed build result to UNSTABLE
Publishing Javadoc
ERROR: Publisher hudson.tasks.JavadocArchiver aborted due to exception
/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/site/api does not exist.
	at org.apache.tools.ant.types.AbstractFileSet.getDirectoryScanner(AbstractFileSet.java:474)
	at hudson.FilePath$34.hasMatch(FilePath.java:1801)
	at hudson.FilePath$34.invoke(FilePath.java:1710)
	at hudson.FilePath$34.invoke(FilePath.java:1701)
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:1995)
	at hudson.remoting.UserRequest.perform(UserRequest.java:118)
	at hudson.remoting.UserRequest.perform(UserRequest.java:48)
	at hudson.remoting.Request$2.run(Request.java:287)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:441)
	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303)
	at java.util.concurrent.FutureTask.run(FutureTask.java:138)
	at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908)
	at java.lang.Thread.run(Thread.java:662)
Recording fingerprints
Updating MAPREDUCE-3157
Updating MAPREDUCE-3166
Updating MAPREDUCE-3035
Updating HDFS-2385
Updating MAPREDUCE-3275
Updating HDFS-2512
Updating MAPREDUCE-3241
Sending e-mails to: hdfs-dev@hadoop.apache.org
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
2 tests failed.
FAILED:  org.apache.hadoop.cli.TestHDFSCLI.testAll

Error Message:
One of the tests failed. See the Detailed results to identify the command that failed

Stack Trace:
java.lang.AssertionError: One of the tests failed. See the Detailed results to identify the command that failed
	at org.junit.Assert.fail(Assert.java:91)
	at org.junit.Assert.assertTrue(Assert.java:43)
	at org.apache.hadoop.cli.CLITestHelper.displayResults(CLITestHelper.java:264)
	at org.apache.hadoop.cli.CLITestHelper.tearDown(CLITestHelper.java:126)
	at org.apache.hadoop.cli.TestHDFSCLI.tearDown(TestHDFSCLI.java:81)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:76)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)


FAILED:  org.apache.hadoop.hdfs.TestDfsOverAvroRpc.testWorkingDirectory

Error Message:
Two methods with same name: reportBadBlocks

Stack Trace:
org.apache.avro.AvroTypeException: Two methods with same name: reportBadBlocks
	at org.apache.avro.reflect.ReflectData.getProtocol(ReflectData.java:394)
	at org.apache.avro.ipc.reflect.ReflectResponder.<init>(ReflectResponder.java:36)
	at org.apache.hadoop.ipc.AvroRpcEngine.createResponder(AvroRpcEngine.java:189)
	at org.apache.hadoop.ipc.AvroRpcEngine$TunnelResponder.<init>(AvroRpcEngine.java:196)
	at org.apache.hadoop.ipc.AvroRpcEngine.getServer(AvroRpcEngine.java:232)
	at org.apache.hadoop.ipc.RPC.getServer(RPC.java:550)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.<init>(NameNodeRpcServer.java:145)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createRpcServer(NameNode.java:356)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:334)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:458)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:450)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:751)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:641)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:545)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:261)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:247)
	at org.apache.hadoop.hdfs.TestLocalDFS.__CLR3_0_2hl5jzp17xq(TestLocalDFS.java:64)
	at org.apache.hadoop.hdfs.TestLocalDFS.testWorkingDirectory(TestLocalDFS.java:62)
	at org.apache.hadoop.hdfs.TestDfsOverAvroRpc.__CLR3_0_2hl5jzp1544(TestDfsOverAvroRpc.java:30)
	at org.apache.hadoop.hdfs.TestDfsOverAvroRpc.testWorkingDirectory(TestDfsOverAvroRpc.java:27)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at junit.framework.TestCase.runTest(TestCase.java:168)
	at junit.framework.TestCase.runBare(TestCase.java:134)
	at junit.framework.TestResult$1.protect(TestResult.java:110)
	at junit.framework.TestResult.runProtected(TestResult.java:128)
	at junit.framework.TestResult.run(TestResult.java:113)
	at junit.framework.TestCase.run(TestCase.java:124)
	at junit.framework.TestSuite.runTest(TestSuite.java:232)
	at junit.framework.TestSuite.run(TestSuite.java:227)
	at org.junit.internal.runners.JUnit38ClassRunner.run(JUnit38ClassRunner.java:83)
	at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:53)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:123)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:104)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:164)
	at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:110)
	at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:172)
	at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcessWhenForked(SurefireStarter.java:78)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:70)




Jenkins build is unstable: Hadoop-Hdfs-0.23-Build #57

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/57/changes>



Hadoop-Hdfs-0.23-Build - Build # 59 - Still Failing

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/59/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 7867 lines...]
[INFO]                  from /usr/include/sys/types.h:27,
[INFO]                  from hdfs.h:22,
[INFO]                  from hdfs.c:19:
[INFO] /usr/include/gnu/stubs.h:7:27: error: gnu/stubs-32.h: No such file or directory
[INFO] In file included from /usr/include/sys/types.h:147,
[INFO]                  from hdfs.h:22,
[INFO]                  from hdfs.c:19:
[INFO] /usr/lib/gcc/x86_64-linux-gnu/4.4.3/include/stddef.h:211: error: duplicate 'unsigned'
[INFO] /usr/lib/gcc/x86_64-linux-gnu/4.4.3/include/stddef.h:211: error: two or more data types in declaration specifiers
[INFO] make: *** [hdfs.lo] Error 1
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS ................................ FAILURE [22.602s]
[INFO] Apache Hadoop HDFS Project ........................ SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 23.145s
[INFO] Finished at: Tue Nov 01 05:02:48 UTC 2011
[INFO] Final Memory: 25M/242M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.codehaus.mojo:make-maven-plugin:1.0-beta-1:make-install (compile) on project hadoop-hdfs: make returned an exit value != 0. Aborting build; see command output above for more information. -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoExecutionException
+ /home/jenkins/tools/maven/latest/bin/mvn test -Dmaven.test.failure.ignore=true -Pclover -DcloverLicenseLocation=/home/jenkins/tools/clover/latest/lib/clover.license
Archiving artifacts
Publishing Clover coverage report...
Publishing Clover HTML report...
Publishing Clover XML report...
Publishing Clover coverage results...
Recording test results
Publishing Javadoc
ERROR: Publisher hudson.tasks.JavadocArchiver aborted due to exception
/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/hadoop-hdfs/target/site/api does not exist.
	at org.apache.tools.ant.types.AbstractFileSet.getDirectoryScanner(AbstractFileSet.java:474)
	at hudson.FilePath$34.hasMatch(FilePath.java:1801)
	at hudson.FilePath$34.invoke(FilePath.java:1710)
	at hudson.FilePath$34.invoke(FilePath.java:1701)
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:1995)
	at hudson.remoting.UserRequest.perform(UserRequest.java:118)
	at hudson.remoting.UserRequest.perform(UserRequest.java:48)
	at hudson.remoting.Request$2.run(Request.java:287)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:441)
	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303)
	at java.util.concurrent.FutureTask.run(FutureTask.java:138)
	at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908)
	at java.lang.Thread.run(Thread.java:662)
Recording fingerprints
Error updating JIRA issues. Saving issues for next build.
com.atlassian.jira.rpc.exception.RemotePermissionException: This issue does not exist or you don't have permission to view it.
Sending e-mails to: hdfs-dev@hadoop.apache.org
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
All tests passed

Build failed in Jenkins: Hadoop-Hdfs-0.23-Build #59

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/59/changes>

Changes:

[suresh] Fix HDFS-2552 to HDFS-2522

[suresh] Disable TestDfsOverAvroRpc in 0.23. Contributed by Suresh Srinivas.

[tomwhite] Merge -r 1195816:1195817 from trunk to branch-0.23. Fixes: HADOOP-7782.

[mahadev] MAPREDUCE-3317. Rumen TraceBuilder is emiting null as hostname. (Ravi Gummadi via mahadev) - Merging r1195814 from trunk.

[mahadev] MAPREDUCE-3316. Rebooted link is not working properly. (Bhallamudi Venkata Siva Kamesh via mahadev) - Merging r1195805 from trunk.

[acmurthy] Merge -c 1195792 from trunk to branch-0.23 to fix MAPREDUCE-3237.

[acmurthy] Fixing CHANGES.txt to reflect 0.23 content.

[acmurthy] Merge -c 1195764 from trunk to branch-0.23 to fix MAPREDUCE-3322.

[mahadev] MAPREDUCE-3103. Implement Job ACLs for MRAppMaster. (mahadev) - Merging r1195761 from trunk.

[szetszwo] svn merge -c 1195760 from trunk for HADOOP-7771.

[szetszwo] svn merge -c 1195754 from trunk for HDFS-2038.

[acmurthy] Merge -c 1195745 from trunk to branch-0.23 to fix MAPREDUCE-3220.

[acmurthy] Merge -c 1195743 from trunk to branch-0.23 to fix MAPREDUCE-3321.

[szetszwo] svn merge -c 1195731 from trunk for HDFS-2065.

------------------------------------------
[...truncated 7674 lines...]
[WARNING] 
[WARNING] It is highly recommended to fix these problems because they threaten the stability of your build.
[WARNING] 
[WARNING] For this reason, future Maven versions might no longer support building such malformed projects.
[WARNING] 
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Build Order:
[INFO] 
[INFO] Apache Hadoop HDFS
[INFO] Apache Hadoop HDFS Project
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Building Apache Hadoop HDFS 0.23.0-SNAPSHOT
[INFO] ------------------------------------------------------------------------
[INFO] 
[INFO] --- maven-clean-plugin:2.4.1:clean (default-clean) @ hadoop-hdfs ---
[INFO] Deleting <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target>
[INFO] 
[INFO] --- jspc-maven-plugin:2.0-alpha-3:compile (hdfs) @ hadoop-hdfs ---
[WARNING] Compiled JSPs will not be added to the project and web.xml will not be modified, either because includeInProject is set to false or because the project's packaging is not 'war'.
Created dir: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/generated-src/main/jsp>
Created dir: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes>
[INFO] Compiling 8 JSP source files to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/generated-src/main/jsp>
log4j:WARN No appenders could be found for logger (org.apache.jasper.JspC).
log4j:WARN Please initialize the log4j system properly.
WARN: The method class org.apache.commons.logging.impl.SLF4JLogFactory#release() was invoked.
WARN: Please see http://www.slf4j.org/codes.html for an explanation.
[INFO] Compiled completed in 0:00:00.261
[INFO] 
[INFO] --- jspc-maven-plugin:2.0-alpha-3:compile (secondary) @ hadoop-hdfs ---
[WARNING] Compiled JSPs will not be added to the project and web.xml will not be modified, either because includeInProject is set to false or because the project's packaging is not 'war'.
[INFO] Compiling 1 JSP source file to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/generated-src/main/jsp>
WARN: The method class org.apache.commons.logging.impl.SLF4JLogFactory#release() was invoked.
WARN: Please see http://www.slf4j.org/codes.html for an explanation.
[INFO] Compiled completed in 0:00:00.017
[INFO] 
[INFO] --- jspc-maven-plugin:2.0-alpha-3:compile (datanode) @ hadoop-hdfs ---
[WARNING] Compiled JSPs will not be added to the project and web.xml will not be modified, either because includeInProject is set to false or because the project's packaging is not 'war'.
[INFO] Compiling 3 JSP source files to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/generated-src/main/jsp>
WARN: The method class org.apache.commons.logging.impl.SLF4JLogFactory#release() was invoked.
WARN: Please see http://www.slf4j.org/codes.html for an explanation.
[INFO] Compiled completed in 0:00:00.025
[INFO] 
[INFO] --- build-helper-maven-plugin:1.5:add-source (add-source) @ hadoop-hdfs ---
[INFO] Source directory: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/generated-src/main/jsp> added.
[INFO] 
[INFO] --- maven-resources-plugin:2.4.3:resources (default-resources) @ hadoop-hdfs ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] Copying 2 resources
[INFO] 
[INFO] --- maven-compiler-plugin:2.3.2:compile (default-compile) @ hadoop-hdfs ---
[INFO] Compiling 328 source files to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes>
[INFO] 
[INFO] --- maven-antrun-plugin:1.6:run (create-web-xmls) @ hadoop-hdfs ---
[INFO] Executing tasks

main:
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/webapps/hdfs/WEB-INF>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/webapps/secondary/WEB-INF>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/webapps/datanode/WEB-INF>
     [copy] Copying 6 files to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/webapps>
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-antrun-plugin:1.6:run (compile) @ hadoop-hdfs ---
[INFO] Executing tasks

main:
     [copy] Copying 15 files to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/native>
     [copy] Copied 6 empty directories to 2 empty directories under <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/native>
[INFO] Executed tasks
[INFO] 
[INFO] --- make-maven-plugin:1.0-beta-1:autoreconf (compile) @ hadoop-hdfs ---
[INFO] 
[INFO] --- make-maven-plugin:1.0-beta-1:configure (compile) @ hadoop-hdfs ---
[INFO] checking for a BSD-compatible install... /usr/bin/install -c
[INFO] checking whether build environment is sane... yes
[INFO] checking for a thread-safe mkdir -p... /bin/mkdir -p
[INFO] checking for gawk... no
[INFO] checking for mawk... mawk
[INFO] checking whether make sets $(MAKE)... yes
[INFO] checking build system type... x86_64-unknown-linux-gnu
[INFO] checking host system type... x86_64-unknown-linux-gnu
[INFO] checking for style of include used by make... GNU
[INFO] checking for gcc... gcc
[INFO] checking whether the C compiler works... yes
[INFO] checking for C compiler default output file name... a.out
[INFO] checking for suffix of executables... 
[INFO] checking whether we are cross compiling... no
[INFO] checking for suffix of object files... o
[INFO] checking whether we are using the GNU C compiler... yes
[INFO] checking whether gcc accepts -g... yes
[INFO] checking for gcc option to accept ISO C89... none needed
[INFO] checking dependency style of gcc... gcc3
[INFO] checking for a sed that does not truncate output... /bin/sed
[INFO] checking for grep that handles long lines and -e... /bin/grep
[INFO] checking for egrep... /bin/grep -E
[INFO] checking for fgrep... /bin/grep -F
[INFO] checking for ld used by gcc... /usr/bin/ld
[INFO] checking if the linker (/usr/bin/ld) is GNU ld... yes
[INFO] checking for BSD- or MS-compatible name lister (nm)... /usr/bin/nm -B
[INFO] checking the name lister (/usr/bin/nm -B) interface... BSD nm
[INFO] checking whether ln -s works... yes
[INFO] checking the maximum length of command line arguments... 1572864
[INFO] checking whether the shell understands some XSI constructs... yes
[INFO] checking whether the shell understands "+="... yes
[INFO] checking for /usr/bin/ld option to reload object files... -r
[INFO] checking for objdump... objdump
[INFO] checking how to recognize dependent libraries... pass_all
[INFO] checking for ar... ar
[INFO] checking for strip... strip
[INFO] checking for ranlib... ranlib
[INFO] checking command to parse /usr/bin/nm -B output from gcc object... ok
[INFO] checking how to run the C preprocessor... gcc -E
[INFO] checking for ANSI C header files... yes
[INFO] checking for sys/types.h... yes
[INFO] checking for sys/stat.h... yes
[INFO] checking for stdlib.h... yes
[INFO] checking for string.h... yes
[INFO] checking for memory.h... yes
[INFO] checking for strings.h... yes
[INFO] checking for inttypes.h... yes
[INFO] checking for stdint.h... yes
[INFO] checking for unistd.h... yes
[INFO] checking for dlfcn.h... yes
[INFO] checking for objdir... .libs
[INFO] checking if gcc supports -fno-rtti -fno-exceptions... no
[INFO] checking for gcc option to produce PIC... -fPIC -DPIC
[INFO] checking if gcc PIC flag -fPIC -DPIC works... yes
[INFO] checking if gcc static flag -static works... yes
[INFO] checking if gcc supports -c -o file.o... yes
[INFO] checking if gcc supports -c -o file.o... (cached) yes
[INFO] checking whether the gcc linker (/usr/bin/ld -m elf_x86_64) supports shared libraries... yes
[INFO] checking whether -lc should be explicitly linked in... no
[INFO] checking dynamic linker characteristics... GNU/Linux ld.so
[INFO] checking how to hardcode library paths into programs... immediate
[INFO] checking whether stripping libraries is possible... yes
[INFO] checking if libtool supports shared libraries... yes
[INFO] checking whether to build shared libraries... yes
[INFO] checking whether to build static libraries... yes
[INFO] *** Current host ***
[INFO] checking cached host system type... ok
[INFO] *** C-Language compilation tools ***
[INFO] checking for gcc... (cached) gcc
[INFO] checking whether we are using the GNU C compiler... (cached) yes
[INFO] checking whether gcc accepts -g... (cached) yes
[INFO] checking for gcc option to accept ISO C89... (cached) none needed
[INFO] checking dependency style of gcc... (cached) gcc3
[INFO] checking for ranlib... (cached) ranlib
[INFO] *** Host support ***
[INFO] checking C flags dependant on host system type... ok
[INFO] *** Java compilation tools ***
[INFO] checking for sablevm... NONE
[INFO] checking for kaffe... NONE
[INFO] checking for javac... /home/jenkins/tools/java/latest/bin/javac
[INFO] /home/jenkins/tools/java/latest/bin/javac
[INFO] checking wether the Java compiler (/home/jenkins/tools/java/latest/bin/javac) works... yes
[INFO] checking for jar... /home/jenkins/tools/java/latest/bin/jar
[INFO] checking where on earth this jvm library is..... ohh u there ... /home/jenkins/tools/java/latest/jre/lib/i386/server 
[INFO] VALUE OF JVM_ARCH IS :32
[INFO] gcc flags added
[INFO] checking for gcc... (cached) gcc
[INFO] checking whether we are using the GNU C compiler... (cached) yes
[INFO] checking whether gcc accepts -g... (cached) yes
[INFO] checking for gcc option to accept ISO C89... (cached) none needed
[INFO] checking dependency style of gcc... (cached) gcc3
[INFO] checking for size_t... no
[INFO] checking for strdup... no
[INFO] checking for strerror... no
[INFO] checking for strtoul... no
[INFO] checking fcntl.h usability... no
[INFO] checking fcntl.h presence... yes
[INFO] configure: WARNING: fcntl.h: present but cannot be compiled
[INFO] configure: WARNING: fcntl.h:     check for missing prerequisite headers?
[INFO] configure: WARNING: fcntl.h: see the Autoconf documentation
[INFO] configure: WARNING: fcntl.h:     section "Present But Cannot Be Compiled"
[INFO] configure: WARNING: fcntl.h: proceeding with the compiler's result
[INFO] configure: WARNING:     ## --------------------------------- ##
[INFO] configure: WARNING:     ## Report this to omalley@apache.org ##
[INFO] configure: WARNING:     ## --------------------------------- ##
[INFO] checking for fcntl.h... no
[INFO] checking for an ANSI C-conforming const... yes
[INFO] checking for working volatile... yes
[INFO] checking for stdbool.h that conforms to C99... yes
[INFO] checking for _Bool... no
[INFO] configure: creating ./config.status
[INFO] config.status: creating Makefile
[INFO] config.status: executing depfiles commands
[INFO] config.status: executing libtool commands
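
Note: two details in the configure transcript above foreshadow the compile failure that follows. First, the JVM-detection macro found a 32-bit JRE (.../jre/lib/i386/server, JVM_ARCH 32) and so added -m32 to the gcc flags on an x86_64 host. Second, with -m32 in effect every compile-based probe afterwards fails (size_t, strdup, strerror, strtoul, and the "fcntl.h: present but cannot be compiled" warning), because the build slave has no 32-bit glibc development headers. A sketch that reproduces the probe failures, assuming a Debian/Ubuntu-style slave like the one in this log:

    # Any -m32 compile dies the same way when the 32-bit userland headers
    # are missing; installing gcc-multilib/libc6-dev-i386 (or pointing the
    # build at a 64-bit JVM) would make these probes pass again.
    cat > conftest.c <<'EOF'
    #include <fcntl.h>   /* the "present but cannot be compiled" header */
    int main(void) { return 0; }
    EOF
    gcc -m32 conftest.c -o conftest \
      || echo "no 32-bit glibc headers: gnu/stubs-32.h will be missing too"
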
[INFO] 
[INFO] --- make-maven-plugin:1.0-beta-1:make-install (compile) @ hadoop-hdfs ---
[INFO] /bin/bash ./libtool --tag=CC   --mode=compile gcc -DPACKAGE_NAME=\"libhdfs\" -DPACKAGE_TARNAME=\"libhdfs\" -DPACKAGE_VERSION=\"0.1.0\" -DPACKAGE_STRING=\"libhdfs\ 0.1.0\" -DPACKAGE_BUGREPORT=\"omalley@apache.org\" -DPACKAGE_URL=\"\" -DPACKAGE=\"libhdfs\" -DVERSION=\"0.1.0\" -DSTDC_HEADERS=1 -DHAVE_SYS_TYPES_H=1 -DHAVE_SYS_STAT_H=1 -DHAVE_STDLIB_H=1 -DHAVE_STRING_H=1 -DHAVE_MEMORY_H=1 -DHAVE_STRINGS_H=1 -DHAVE_INTTYPES_H=1 -DHAVE_STDINT_H=1 -DHAVE_UNISTD_H=1 -DHAVE_DLFCN_H=1 -DLT_OBJDIR=\".libs/\" -Dsize_t=unsigned\ int -DHAVE_STDBOOL_H=1 -I.     -g -O2 -DOS_LINUX -DDSO_DLFCN -DCPU=\"amd64\" -m32 -I/home/jenkins/tools/java/latest/include -I/home/jenkins/tools/java/latest/include/linux -Wall -Wstrict-prototypes -MT hdfs.lo -MD -MP -MF .deps/hdfs.Tpo -c -o hdfs.lo hdfs.c
[INFO] libtool: compile:  gcc -DPACKAGE_NAME=\"libhdfs\" -DPACKAGE_TARNAME=\"libhdfs\" -DPACKAGE_VERSION=\"0.1.0\" "-DPACKAGE_STRING=\"libhdfs 0.1.0\"" -DPACKAGE_BUGREPORT=\"omalley@apache.org\" -DPACKAGE_URL=\"\" -DPACKAGE=\"libhdfs\" -DVERSION=\"0.1.0\" -DSTDC_HEADERS=1 -DHAVE_SYS_TYPES_H=1 -DHAVE_SYS_STAT_H=1 -DHAVE_STDLIB_H=1 -DHAVE_STRING_H=1 -DHAVE_MEMORY_H=1 -DHAVE_STRINGS_H=1 -DHAVE_INTTYPES_H=1 -DHAVE_STDINT_H=1 -DHAVE_UNISTD_H=1 -DHAVE_DLFCN_H=1 -DLT_OBJDIR=\".libs/\" "-Dsize_t=unsigned int" -DHAVE_STDBOOL_H=1 -I. -g -O2 -DOS_LINUX -DDSO_DLFCN -DCPU=\"amd64\" -m32 -I/home/jenkins/tools/java/latest/include -I/home/jenkins/tools/java/latest/include/linux -Wall -Wstrict-prototypes -MT hdfs.lo -MD -MP -MF .deps/hdfs.Tpo -c hdfs.c  -fPIC -DPIC -o .libs/hdfs.o
[INFO] In file included from /usr/include/features.h:378,
[INFO]                  from /usr/include/sys/types.h:27,
[INFO]                  from hdfs.h:22,
[INFO]                  from hdfs.c:19:
[INFO] /usr/include/gnu/stubs.h:7:27: error: gnu/stubs-32.h: No such file or directory
[INFO] In file included from /usr/include/sys/types.h:147,
[INFO]                  from hdfs.h:22,
[INFO]                  from hdfs.c:19:
[INFO] /usr/lib/gcc/x86_64-linux-gnu/4.4.3/include/stddef.h:211: error: duplicate 'unsigned'
[INFO] /usr/lib/gcc/x86_64-linux-gnu/4.4.3/include/stddef.h:211: error: two or more data types in declaration specifiers
[INFO] make: *** [hdfs.lo] Error 1
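
Note: both compiler errors above are downstream of the configure results. gnu/stubs-32.h is the 32-bit half of glibc's stubs.h and is only present when the 32-bit libc headers are installed, and the "duplicate 'unsigned'" pair comes from configure's fallback -Dsize_t="unsigned int" (added because the size_t probe failed under -m32) colliding with the compiler's own typedef in stddef.h. A minimal reproduction of the second error, independent of Hadoop:

    # On x86_64, stddef.h declares 'typedef long unsigned int size_t;'.
    # With the macro in place the preprocessor rewrites that line to
    # 'typedef long unsigned int unsigned int;', which is exactly the
    # "duplicate 'unsigned' / two or more data types" pair reported above.
    printf '#include <stddef.h>\nint main(void){return 0;}\n' > sizet.c
    gcc -Dsize_t="unsigned int" -c sizet.c
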
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS ................................ FAILURE [22.602s]
[INFO] Apache Hadoop HDFS Project ........................ SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 23.145s
[INFO] Finished at: Tue Nov 01 05:02:48 UTC 2011
[INFO] Final Memory: 25M/242M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.codehaus.mojo:make-maven-plugin:1.0-beta-1:make-install (compile) on project hadoop-hdfs: make returned an exit value != 0. Aborting build; see command output above for more information. -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoExecutionException
+ /home/jenkins/tools/maven/latest/bin/mvn test -Dmaven.test.failure.ignore=true -Pclover -DcloverLicenseLocation=/home/jenkins/tools/clover/latest/lib/clover.license
Archiving artifacts
Publishing Clover coverage report...
Publishing Clover HTML report...
Publishing Clover XML report...
Publishing Clover coverage results...
Recording test results
Publishing Javadoc
ERROR: Publisher hudson.tasks.JavadocArchiver aborted due to exception
<https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/site/api> does not exist.
	at org.apache.tools.ant.types.AbstractFileSet.getDirectoryScanner(AbstractFileSet.java:474)
	at hudson.FilePath$34.hasMatch(FilePath.java:1801)
	at hudson.FilePath$34.invoke(FilePath.java:1710)
	at hudson.FilePath$34.invoke(FilePath.java:1701)
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:1995)
	at hudson.remoting.UserRequest.perform(UserRequest.java:118)
	at hudson.remoting.UserRequest.perform(UserRequest.java:48)
	at hudson.remoting.Request$2.run(Request.java:287)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:441)
	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303)
	at java.util.concurrent.FutureTask.run(FutureTask.java:138)
	at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908)
	at java.lang.Thread.run(Thread.java:662)
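
Note: the JavadocArchiver failure is a knock-on effect rather than a separate problem. The Maven run aborted during compile, so the javadoc output under target/site/api was never generated, and the post-build "Publish Javadoc" step then trips over the missing directory. A hypothetical guard a job like this could run before the publisher (path taken from the log above; not part of the actual job configuration):

    API_DIR=trunk/hadoop-hdfs-project/hadoop-hdfs/target/site/api
    if [ ! -d "$API_DIR" ]; then
        echo "javadoc was never built ($API_DIR missing); skipping publish"
    fi
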
Recording fingerprints
Error updating JIRA issues. Saving issues for next build.
com.atlassian.jira.rpc.exception.RemotePermissionException: This issue does not exist or you don't have permission to view it.


Build failed in Jenkins: Hadoop-Hdfs-0.23-Build #58

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/58/changes>

Changes:

[todd] HDFS-2512. Add textual error message to data transfer protocol responses. Contributed by Todd Lipcon.

[szetszwo] svn merge -c 1195656 from trunk for HDFS-2385.

[acmurthy] Merge -c 1195579 from trunk to branch-0.23 to fix MAPREDUCE-3275.

[acmurthy] Merge -c 1195575 from trunk to branch-0.23 to fix MAPREDUCE-3035.

[amarrk] MAPREDUCE-3241. [Rumen] Fix Rumen to ignore the AMStartedEvent. (amarrk)

[amarrk] MAPREDUCE-3166. [Rumen] Make Rumen use job history api instead of relying on current history file name format. (Ravi Gummadi via amarrk)

[amarrk] MAPREDUCE-3157. [Rumen] Fix TraceBuilder to handle 0.20 history file names also. (Ravi Gummadi via amarrk)

------------------------------------------
[...truncated 7787 lines...]
Downloaded: http://repo1.maven.org/maven2/org/apache/maven/plugins/maven-checkstyle-plugin/2.6/maven-checkstyle-plugin-2.6.jar (81 KB at 137.9 KB/sec)
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Building Apache Hadoop HDFS 0.23.0-SNAPSHOT
[INFO] ------------------------------------------------------------------------
[INFO] 
[INFO] --- maven-clean-plugin:2.4.1:clean (default-clean) @ hadoop-hdfs ---
[INFO] Deleting <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target>
[INFO] 
[INFO] --- jspc-maven-plugin:2.0-alpha-3:compile (hdfs) @ hadoop-hdfs ---
[WARNING] Compiled JSPs will not be added to the project and web.xml will not be modified, either because includeInProject is set to false or because the project's packaging is not 'war'.
Created dir: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/generated-src/main/jsp>
Created dir: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes>
[INFO] Compiling 8 JSP source files to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/generated-src/main/jsp>
log4j:WARN No appenders could be found for logger (org.apache.jasper.JspC).
log4j:WARN Please initialize the log4j system properly.
WARN: The method class org.apache.commons.logging.impl.SLF4JLogFactory#release() was invoked.
WARN: Please see http://www.slf4j.org/codes.html for an explanation.
[INFO] Compiled completed in 0:00:00.270
[INFO] 
[INFO] --- jspc-maven-plugin:2.0-alpha-3:compile (secondary) @ hadoop-hdfs ---
[WARNING] Compiled JSPs will not be added to the project and web.xml will not be modified, either because includeInProject is set to false or because the project's packaging is not 'war'.
[INFO] Compiling 1 JSP source file to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/generated-src/main/jsp>
WARN: The method class org.apache.commons.logging.impl.SLF4JLogFactory#release() was invoked.
WARN: Please see http://www.slf4j.org/codes.html for an explanation.
[INFO] Compiled completed in 0:00:00.016
[INFO] 
[INFO] --- jspc-maven-plugin:2.0-alpha-3:compile (datanode) @ hadoop-hdfs ---
[WARNING] Compiled JSPs will not be added to the project and web.xml will not be modified, either because includeInProject is set to false or because the project's packaging is not 'war'.
[INFO] Compiling 3 JSP source files to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/generated-src/main/jsp>
WARN: The method class org.apache.commons.logging.impl.SLF4JLogFactory#release() was invoked.
WARN: Please see http://www.slf4j.org/codes.html for an explanation.
[INFO] Compiled completed in 0:00:00.021
[INFO] 
[INFO] --- build-helper-maven-plugin:1.5:add-source (add-source) @ hadoop-hdfs ---
[INFO] Source directory: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/generated-src/main/jsp> added.
[INFO] 
[INFO] --- maven-resources-plugin:2.4.3:resources (default-resources) @ hadoop-hdfs ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] Copying 2 resources
[INFO] 
[INFO] --- maven-compiler-plugin:2.3.2:compile (default-compile) @ hadoop-hdfs ---
[INFO] Compiling 328 source files to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes>
[INFO] 
[INFO] --- maven-antrun-plugin:1.6:run (create-web-xmls) @ hadoop-hdfs ---
[INFO] Executing tasks

main:
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/webapps/hdfs/WEB-INF>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/webapps/secondary/WEB-INF>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/webapps/datanode/WEB-INF>
     [copy] Copying 6 files to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/webapps>
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-antrun-plugin:1.6:run (compile) @ hadoop-hdfs ---
[INFO] Executing tasks

main:
     [copy] Copying 15 files to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/native>
     [copy] Copied 6 empty directories to 2 empty directories under <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/native>
[INFO] Executed tasks
[INFO] 
[INFO] --- make-maven-plugin:1.0-beta-1:autoreconf (compile) @ hadoop-hdfs ---
[INFO] 
[INFO] --- make-maven-plugin:1.0-beta-1:configure (compile) @ hadoop-hdfs ---
[INFO] checking for a BSD-compatible install... /usr/bin/install -c
[INFO] checking whether build environment is sane... yes
[INFO] checking for a thread-safe mkdir -p... /bin/mkdir -p
[INFO] checking for gawk... no
[INFO] checking for mawk... mawk
[INFO] checking whether make sets $(MAKE)... yes
[INFO] checking build system type... x86_64-unknown-linux-gnu
[INFO] checking host system type... x86_64-unknown-linux-gnu
[INFO] checking for style of include used by make... GNU
[INFO] checking for gcc... gcc
[INFO] checking whether the C compiler works... yes
[INFO] checking for C compiler default output file name... a.out
[INFO] checking for suffix of executables... 
[INFO] checking whether we are cross compiling... no
[INFO] checking for suffix of object files... o
[INFO] checking whether we are using the GNU C compiler... yes
[INFO] checking whether gcc accepts -g... yes
[INFO] checking for gcc option to accept ISO C89... none needed
[INFO] checking dependency style of gcc... gcc3
[INFO] checking for a sed that does not truncate output... /bin/sed
[INFO] checking for grep that handles long lines and -e... /bin/grep
[INFO] checking for egrep... /bin/grep -E
[INFO] checking for fgrep... /bin/grep -F
[INFO] checking for ld used by gcc... /usr/bin/ld
[INFO] checking if the linker (/usr/bin/ld) is GNU ld... yes
[INFO] checking for BSD- or MS-compatible name lister (nm)... /usr/bin/nm -B
[INFO] checking the name lister (/usr/bin/nm -B) interface... BSD nm
[INFO] checking whether ln -s works... yes
[INFO] checking the maximum length of command line arguments... 1572864
[INFO] checking whether the shell understands some XSI constructs... yes
[INFO] checking whether the shell understands "+="... yes
[INFO] checking for /usr/bin/ld option to reload object files... -r
[INFO] checking for objdump... objdump
[INFO] checking how to recognize dependent libraries... pass_all
[INFO] checking for ar... ar
[INFO] checking for strip... strip
[INFO] checking for ranlib... ranlib
[INFO] checking command to parse /usr/bin/nm -B output from gcc object... ok
[INFO] checking how to run the C preprocessor... gcc -E
[INFO] checking for ANSI C header files... yes
[INFO] checking for sys/types.h... yes
[INFO] checking for sys/stat.h... yes
[INFO] checking for stdlib.h... yes
[INFO] checking for string.h... yes
[INFO] checking for memory.h... yes
[INFO] checking for strings.h... yes
[INFO] checking for inttypes.h... yes
[INFO] checking for stdint.h... yes
[INFO] checking for unistd.h... yes
[INFO] checking for dlfcn.h... yes
[INFO] checking for objdir... .libs
[INFO] checking if gcc supports -fno-rtti -fno-exceptions... no
[INFO] checking for gcc option to produce PIC... -fPIC -DPIC
[INFO] checking if gcc PIC flag -fPIC -DPIC works... yes
[INFO] checking if gcc static flag -static works... yes
[INFO] checking if gcc supports -c -o file.o... yes
[INFO] checking if gcc supports -c -o file.o... (cached) yes
[INFO] checking whether the gcc linker (/usr/bin/ld -m elf_x86_64) supports shared libraries... yes
[INFO] checking whether -lc should be explicitly linked in... no
[INFO] checking dynamic linker characteristics... GNU/Linux ld.so
[INFO] checking how to hardcode library paths into programs... immediate
[INFO] checking whether stripping libraries is possible... yes
[INFO] checking if libtool supports shared libraries... yes
[INFO] checking whether to build shared libraries... yes
[INFO] checking whether to build static libraries... yes
[INFO] *** Current host ***
[INFO] checking cached host system type... ok
[INFO] *** C-Language compilation tools ***
[INFO] checking for gcc... (cached) gcc
[INFO] checking whether we are using the GNU C compiler... (cached) yes
[INFO] checking whether gcc accepts -g... (cached) yes
[INFO] checking for gcc option to accept ISO C89... (cached) none needed
[INFO] checking dependency style of gcc... (cached) gcc3
[INFO] checking for ranlib... (cached) ranlib
[INFO] *** Host support ***
[INFO] checking C flags dependant on host system type... ok
[INFO] *** Java compilation tools ***
[INFO] checking for sablevm... NONE
[INFO] checking for kaffe... NONE
[INFO] checking for javac... /home/jenkins/tools/java/latest/bin/javac
[INFO] /home/jenkins/tools/java/latest/bin/javac
[INFO] checking wether the Java compiler (/home/jenkins/tools/java/latest/bin/javac) works... yes
[INFO] checking for jar... /home/jenkins/tools/java/latest/bin/jar
[INFO] checking where on earth this jvm library is..... ohh u there ... /home/jenkins/tools/java/latest/jre/lib/i386/server 
[INFO] VALUE OF JVM_ARCH IS :32
[INFO] gcc flags added
[INFO] checking for gcc... (cached) gcc
[INFO] checking whether we are using the GNU C compiler... (cached) yes
[INFO] checking whether gcc accepts -g... (cached) yes
[INFO] checking for gcc option to accept ISO C89... (cached) none needed
[INFO] checking dependency style of gcc... (cached) gcc3
[INFO] checking for size_t... no
[INFO] checking for strdup... no
[INFO] checking for strerror... no
[INFO] checking for strtoul... no
[INFO] checking fcntl.h usability... no
[INFO] checking fcntl.h presence... yes
[INFO] configure: WARNING: fcntl.h: present but cannot be compiled
[INFO] configure: WARNING: fcntl.h:     check for missing prerequisite headers?
[INFO] configure: WARNING: fcntl.h: see the Autoconf documentation
[INFO] configure: WARNING: fcntl.h:     section "Present But Cannot Be Compiled"
[INFO] configure: WARNING: fcntl.h: proceeding with the compiler's result
[INFO] configure: WARNING:     ## --------------------------------- ##
[INFO] configure: WARNING:     ## Report this to omalley@apache.org ##
[INFO] configure: WARNING:     ## --------------------------------- ##
[INFO] checking for fcntl.h... no
[INFO] checking for an ANSI C-conforming const... yes
[INFO] checking for working volatile... yes
[INFO] checking for stdbool.h that conforms to C99... yes
[INFO] checking for _Bool... no
[INFO] configure: creating ./config.status
[INFO] config.status: creating Makefile
[INFO] config.status: executing depfiles commands
[INFO] config.status: executing libtool commands
[INFO] 
[INFO] --- make-maven-plugin:1.0-beta-1:make-install (compile) @ hadoop-hdfs ---
[INFO] /bin/bash ./libtool --tag=CC   --mode=compile gcc -DPACKAGE_NAME=\"libhdfs\" -DPACKAGE_TARNAME=\"libhdfs\" -DPACKAGE_VERSION=\"0.1.0\" -DPACKAGE_STRING=\"libhdfs\ 0.1.0\" -DPACKAGE_BUGREPORT=\"omalley@apache.org\" -DPACKAGE_URL=\"\" -DPACKAGE=\"libhdfs\" -DVERSION=\"0.1.0\" -DSTDC_HEADERS=1 -DHAVE_SYS_TYPES_H=1 -DHAVE_SYS_STAT_H=1 -DHAVE_STDLIB_H=1 -DHAVE_STRING_H=1 -DHAVE_MEMORY_H=1 -DHAVE_STRINGS_H=1 -DHAVE_INTTYPES_H=1 -DHAVE_STDINT_H=1 -DHAVE_UNISTD_H=1 -DHAVE_DLFCN_H=1 -DLT_OBJDIR=\".libs/\" -Dsize_t=unsigned\ int -DHAVE_STDBOOL_H=1 -I.     -g -O2 -DOS_LINUX -DDSO_DLFCN -DCPU=\"amd64\" -m32 -I/home/jenkins/tools/java/latest/include -I/home/jenkins/tools/java/latest/include/linux -Wall -Wstrict-prototypes -MT hdfs.lo -MD -MP -MF .deps/hdfs.Tpo -c -o hdfs.lo hdfs.c
[INFO] libtool: compile:  gcc -DPACKAGE_NAME=\"libhdfs\" -DPACKAGE_TARNAME=\"libhdfs\" -DPACKAGE_VERSION=\"0.1.0\" "-DPACKAGE_STRING=\"libhdfs 0.1.0\"" -DPACKAGE_BUGREPORT=\"omalley@apache.org\" -DPACKAGE_URL=\"\" -DPACKAGE=\"libhdfs\" -DVERSION=\"0.1.0\" -DSTDC_HEADERS=1 -DHAVE_SYS_TYPES_H=1 -DHAVE_SYS_STAT_H=1 -DHAVE_STDLIB_H=1 -DHAVE_STRING_H=1 -DHAVE_MEMORY_H=1 -DHAVE_STRINGS_H=1 -DHAVE_INTTYPES_H=1 -DHAVE_STDINT_H=1 -DHAVE_UNISTD_H=1 -DHAVE_DLFCN_H=1 -DLT_OBJDIR=\".libs/\" "-Dsize_t=unsigned int" -DHAVE_STDBOOL_H=1 -I. -g -O2 -DOS_LINUX -DDSO_DLFCN -DCPU=\"amd64\" -m32 -I/home/jenkins/tools/java/latest/include -I/home/jenkins/tools/java/latest/include/linux -Wall -Wstrict-prototypes -MT hdfs.lo -MD -MP -MF .deps/hdfs.Tpo -c hdfs.c  -fPIC -DPIC -o .libs/hdfs.o
[INFO] In file included from /usr/include/features.h:378,
[INFO]                  from /usr/include/sys/types.h:27,
[INFO]                  from hdfs.h:22,
[INFO]                  from hdfs.c:19:
[INFO] /usr/include/gnu/stubs.h:7:27: error: gnu/stubs-32.h: No such file or directory
[INFO] In file included from /usr/include/sys/types.h:147,
[INFO]                  from hdfs.h:22,
[INFO]                  from hdfs.c:19:
[INFO] /usr/lib/gcc/x86_64-linux-gnu/4.4.3/include/stddef.h:211: error: duplicate 'unsigned'
[INFO] /usr/lib/gcc/x86_64-linux-gnu/4.4.3/include/stddef.h:211: error: two or more data types in declaration specifiers
[INFO] make: *** [hdfs.lo] Error 1
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS ................................ FAILURE [22.162s]
[INFO] Apache Hadoop HDFS Project ........................ SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 23.890s
[INFO] Finished at: Mon Oct 31 22:09:40 UTC 2011
[INFO] Final Memory: 27M/275M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.codehaus.mojo:make-maven-plugin:1.0-beta-1:make-install (compile) on project hadoop-hdfs: make returned an exit value != 0. Aborting build; see command output above for more information. -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoExecutionException
+ /home/jenkins/tools/maven/latest/bin/mvn test -Dmaven.test.failure.ignore=true -Pclover -DcloverLicenseLocation=/home/jenkins/tools/clover/latest/lib/clover.license
Archiving artifacts
Publishing Clover coverage report...
Publishing Clover HTML report...
Publishing Clover XML report...
Publishing Clover coverage results...
Recording test results
Build step 'Publish JUnit test result report' changed build result to UNSTABLE
Publishing Javadoc
ERROR: Publisher hudson.tasks.JavadocArchiver aborted due to exception
<https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/site/api> does not exist.
	at org.apache.tools.ant.types.AbstractFileSet.getDirectoryScanner(AbstractFileSet.java:474)
	at hudson.FilePath$34.hasMatch(FilePath.java:1801)
	at hudson.FilePath$34.invoke(FilePath.java:1710)
	at hudson.FilePath$34.invoke(FilePath.java:1701)
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:1995)
	at hudson.remoting.UserRequest.perform(UserRequest.java:118)
	at hudson.remoting.UserRequest.perform(UserRequest.java:48)
	at hudson.remoting.Request$2.run(Request.java:287)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:441)
	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303)
	at java.util.concurrent.FutureTask.run(FutureTask.java:138)
	at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908)
	at java.lang.Thread.run(Thread.java:662)
Recording fingerprints
Updating MAPREDUCE-3157
Updating MAPREDUCE-3166
Updating MAPREDUCE-3035
Updating HDFS-2385
Updating MAPREDUCE-3275
Updating HDFS-2512
Updating MAPREDUCE-3241


Build failed in Jenkins: Hadoop-Hdfs-0.23-Build #56

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/56/changes>

Changes:

[vinodkv] MAPREDUCE-3262. Fixed Container's state-machine in NodeManager to handle a couple of events in failure states correctly. Contributed by Hitesh Shah and Siddharth Seth.
svn merge -c r1195416 --ignore-ancestry ../../trunk/

[vinodkv] MAPREDUCE-2696. Fixed NodeManager to cleanup logs in a thread when logs' aggregation is not enabled. Contributed by Siddharth Seth.
svn merge -c r1195383 --ignore-ancestry ../../trunk/

[vinodkv] MAPREDUCE-3146. Added a MR specific command line to dump logs for a given TaskAttemptID. Contributed by Siddharth Seth.
svn merge -c r1195349 --ignore-ancestry ../../trunk

[vinodkv] MAPREDUCE-2766. Fixed NM to set secure permissions for files and directories in distributed-cache. Contributed by Hitesh Shah.            
svn merge -c r1195340 --ignore-ancestry ../../trunk/

[acmurthy] Adding executable svn prop on mock-container-executor.

[vinodkv] MAPREDUCE-3313. Fixed initialization of ClusterMetrics which was failing TestResourceTrackerService sometimes. Contributed by Hitesh Shah.
svn merge -c r1195319 --ignore-ancestry ../../trunk/

[mahadev] MAPREDUCE-3171 merge from trunk reverted changes from MAPREDUCE-2747 MAPREDUCE-3240.

[vinodkv] MAPREDUCE-3274. Fixed a race condition in MRAppMaster that was causing a task-scheduling deadlock. Contributed by Robert Joseph Evans.
svn merge -c r1195145 --ignore-ancestry ../../trunk/

------------------------------------------
[...truncated 9549 lines...]
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/common/JspHelper$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/DataStorage$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceScanner$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceStorage$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/FSDataset$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/FSDataset$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/browseBlock_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/BlockReceiver$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/DataStorage$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/DataNode$3.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/DataStorage$4.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/DataNode$BlockPoolManager$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/DataStorage$3.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/browseDirectory_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/tail_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/DataStorage$5.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/DataNode$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/DataXceiverServer$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/FSDatasetAsyncDiskService$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods$4.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods$3.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods$3$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSEditLog$6.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSEditLog$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FileJournalManager$EditLogFile$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/dfsnodelist_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSEditLog$7.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/ClusterJspHelper$NamenodeMXBeanHelper$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FileDataServlet$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/block_005finfo_005fxml_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer$1$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/BackupNode$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/GetDelegationTokenServlet$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/CancelDelegationTokenServlet$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode$4.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSEditLog$3.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/corrupt_005freplicas_005fxml_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FsckServlet$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/ClusterJspHelper$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSNamesystem$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/BackupImage$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/EditLogFileInputStream$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/NameNode$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/decommission_jsp.class]>
  [javadoc] JDiff: finished (took 0s, not including scanning the source files).
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/ContentSummaryServlet$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSEditLog$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/status_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSEditLog$5.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/GetImageServlet$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/GetImageServlet$1$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/LeaseManager$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/nn_005fbrowsedfscontent_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSImagePreTransactionalStorageInspector$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/RenewDelegationTokenServlet$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSImage$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSImageSerialization$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/SerialNumberManager$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/dfshealth_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/dfsclusterhealth_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp$BlockTwo$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/corrupt_005ffiles_jsp.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/UpgradeManagerNamenode$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/FSEditLog$4.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode$3.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods$3.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods$5.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods$6.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods$4.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/UpgradeCommand$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/KeyUpdateCommand$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/DatanodeCommand$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/RemoteEditLog$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/ReplicaRecoveryInfo$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/BlockRecoveryCommand$RecoveringBlock$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/DatanodeCommand$2.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/NamenodeRegistration$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/BlockCommand$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/DatanodeRegistration$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/BlockRecoveryCommand$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/BalancerBandwidthCommand$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/CheckpointCommand$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/server/protocol/NamenodeCommand$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/tools/GetConf$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/tools/DFSck$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/tools/offlineEditsViewer/EditsLoaderCurrent$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsImageVisitor$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionVisitor$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/util/CyclicIteration$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/util/LightWeightGSet$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/web/WebHdfsFileSystem$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/web/ParamFilter$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/web/KerberosUgiAuthenticator$1.class]>
  [javadoc] [loading <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/classes/org/apache/hadoop/hdfs/web/resources/Param$1.class]>
  [javadoc] [done in 1925 ms]
  [javadoc] Generating Javadoc
  [javadoc] Javadoc execution
  [javadoc] javadoc: error - Illegal package name: ""
  [javadoc] javadoc: error - File not found: "<https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/dev-support/jdiff/Null.java">
  [javadoc] Loading source files for package org.apache.hadoop.fs...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.protocol...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.protocol.datatransfer...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.protocol.proto...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.security.token.block...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.security.token.delegation...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.server.balancer...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.server.blockmanagement...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.server.common...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.server.datanode...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.server.datanode.metrics...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.server.datanode.web.resources...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.server.namenode...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.server.namenode.metrics...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.server.namenode.web.resources...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.server.protocol...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.tools...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.tools.offlineEditsViewer...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.tools.offlineImageViewer...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.util...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.web...
  [javadoc] Loading source files for package org.apache.hadoop.hdfs.web.resources...
  [javadoc] 2 errors
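
Note: the two javadoc errors above come from the JDiff comparison step, not from the HDFS sources themselves: the ant task evidently passes an empty package argument plus a dev-support/jdiff/Null.java stub that does not exist in this workspace. The same two messages can be produced directly, as a sketch:

    # javadoc treats any non-file argument as a package name, so an empty
    # string and a missing file yield exactly the two errors logged above.
    mkdir -p /tmp/api
    javadoc -d /tmp/api "" dev-support/jdiff/Null.java
    #   javadoc: error - Illegal package name: ""
    #   javadoc: error - File not found: "dev-support/jdiff/Null.java"
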
     [xslt] Processing <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/findbugsXml.xml> to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/site/findbugs.html>
     [xslt] Loading stylesheet /home/jenkins/tools/findbugs/latest/src/xsl/default.xsl
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-antrun-plugin:1.6:run (pre-dist) @ hadoop-hdfs ---
[INFO] Executing tasks

main:
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-antrun-plugin:1.6:run (xprepare-package-hadoop-daemon) @ hadoop-hdfs ---
[INFO] Executing tasks

main:
      [get] Destination already exists (skipping): <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/downloads/commons-daemon-1.0.3-bin-linux-i686.tar.gz>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/commons-daemon.staging>
    [untar] Expanding: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/downloads/commons-daemon-1.0.3-bin-linux-i686.tar.gz> into <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/commons-daemon.staging>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-0.23.0-SNAPSHOT/libexec>
     [copy] Copying <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/commons-daemon.staging/jsvc> to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-0.23.0-SNAPSHOT/libexec/jsvc>
[INFO] Executed tasks
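
Note: the xprepare-package-hadoop-daemon step above stages a 32-bit commons-daemon build (commons-daemon-1.0.3-bin-linux-i686.tar.gz) into libexec/jsvc even though the host is x86_64, consistent with the 32-bit JVM assumption seen earlier in configure. A hypothetical spot-check, assuming the workspace paths in this log:

    # 'file' should report a 32-bit (Intel 80386) ELF executable here.
    file trunk/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-0.23.0-SNAPSHOT/libexec/jsvc
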
[INFO] 
[INFO] --- maven-jar-plugin:2.3.1:jar (default-jar) @ hadoop-hdfs ---
[INFO] 
[INFO] --- maven-site-plugin:3.0:attach-descriptor (attach-descriptor) @ hadoop-hdfs ---
[INFO] 
[INFO] --- maven-assembly-plugin:2.2-beta-3:single (src-dist) @ hadoop-hdfs ---
[INFO] Reading assembly descriptor: hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS ................................ FAILURE [2:12.439s]
[INFO] Apache Hadoop HDFS Project ........................ SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 2:12.929s
[INFO] Finished at: Mon Oct 31 11:35:58 UTC 2011
[INFO] Final Memory: 44M/358M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-assembly-plugin:2.2-beta-3:single (src-dist) on project hadoop-hdfs: Error reading assemblies: Error locating assembly descriptor: hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml
[ERROR] 
[ERROR] [1] [INFO] Searching for file location: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml>
[ERROR] 
[ERROR] [2] [INFO] File: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml> does not exist.
[ERROR] 
[ERROR] [3] [INFO] Invalid artifact specification: 'hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml'. Must contain at least three fields, separated by ':'.
[ERROR] 
[ERROR] [4] [INFO] Failed to resolve classpath resource: /assemblies/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml from classloader: ClassRealm[plugin>org.apache.maven.plugins:maven-assembly-plugin:2.2-beta-3, parent: sun.misc.Launcher$AppClassLoader@182f0db]
[ERROR] 
[ERROR] [5] [INFO] Failed to resolve classpath resource: hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml from classloader: ClassRealm[plugin>org.apache.maven.plugins:maven-assembly-plugin:2.2-beta-3, parent: sun.misc.Launcher$AppClassLoader@182f0db]
[ERROR] 
[ERROR] [6] [INFO] File: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml> does not exist.
[ERROR] 
[ERROR] [7] [INFO] Building URL from location: hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml
[ERROR] Error:
[ERROR] java.net.MalformedURLException: no protocol: hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml
[ERROR] at java.net.URL.<init>(URL.java:567)
[ERROR] at java.net.URL.<init>(URL.java:464)
[ERROR] at java.net.URL.<init>(URL.java:413)
[ERROR] at org.apache.maven.shared.io.location.URLLocatorStrategy.resolve(URLLocatorStrategy.java:54)
[ERROR] at org.apache.maven.shared.io.location.Locator.resolve(Locator.java:81)
[ERROR] at org.apache.maven.plugin.assembly.io.DefaultAssemblyReader.addAssemblyFromDescriptor(DefaultAssemblyReader.java:309)
[ERROR] at org.apache.maven.plugin.assembly.io.DefaultAssemblyReader.readAssemblies(DefaultAssemblyReader.java:140)
[ERROR] at org.apache.maven.plugin.assembly.mojos.AbstractAssemblyMojo.execute(AbstractAssemblyMojo.java:328)
[ERROR] at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo(DefaultBuildPluginManager.java:101)
[ERROR] at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:209)
[ERROR] at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:153)
[ERROR] at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:145)
[ERROR] at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject(LifecycleModuleBuilder.java:84)
[ERROR] at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject(LifecycleModuleBuilder.java:59)
[ERROR] at org.apache.maven.lifecycle.internal.LifecycleStarter.singleThreadedBuild(LifecycleStarter.java:183)
[ERROR] at org.apache.maven.lifecycle.internal.LifecycleStarter.execute(LifecycleStarter.java:161)
[ERROR] at org.apache.maven.DefaultMaven.doExecute(DefaultMaven.java:319)
[ERROR] at org.apache.maven.DefaultMaven.execute(DefaultMaven.java:156)
[ERROR] at org.apache.maven.cli.MavenCli.execute(MavenCli.java:537)
[ERROR] at org.apache.maven.cli.MavenCli.doMain(MavenCli.java:196)
[ERROR] at org.apache.maven.cli.MavenCli.main(MavenCli.java:141)
[ERROR] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[ERROR] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
[ERROR] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
[ERROR] at java.lang.reflect.Method.invoke(Method.java:597)
[ERROR] at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced(Launcher.java:290)
[ERROR] at org.codehaus.plexus.classworlds.launcher.Launcher.launch(Launcher.java:230)
[ERROR] at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode(Launcher.java:409)
[ERROR] at org.codehaus.plexus.classworlds.launcher.Launcher.main(Launcher.java:352)
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoExecutionException
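
The failure is a descriptor-resolution problem, not a compile or test error: the src-dist execution references hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml as a relative path, and strategies [1] through [7] above show the plugin trying each of its locators in turn (file relative to the module basedir, artifact coordinates, classpath resource, URL) before giving up. Because Maven is running in the hadoop-hdfs module directory rather than the source-tree root, the relative path never resolves. One conventional fix, sketched here as an assumption rather than the project's actual change, is to ship the descriptor inside the hadoop-assemblies artifact and reference it by name via descriptorRefs, which resolves through the plugin classpath regardless of the working directory:

    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-assembly-plugin</artifactId>
      <!-- Put the shared descriptors on the plugin's classpath. -->
      <dependencies>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-assemblies</artifactId>
          <version>${project.version}</version>
        </dependency>
      </dependencies>
      <configuration>
        <!-- Resolved as assemblies/hadoop-src.xml on the plugin classpath
             (note the classpath attempts in [4] and [5] above), so it
             works from any module directory. -->
        <descriptorRefs>
          <descriptorRef>hadoop-src</descriptorRef>
        </descriptorRefs>
      </configuration>
    </plugin>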
+ /home/jenkins/tools/maven/latest/bin/mvn test -Dmaven.test.failure.ignore=true -Pclover -DcloverLicenseLocation=/home/jenkins/tools/clover/latest/lib/clover.license
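For reference, this follow-up invocation runs the tests with -Dmaven.test.failure.ignore=true, the standard Surefire property that keeps the build going past individual test failures, and activates the clover profile with the license path supplied via -DcloverLicenseLocation.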
Build step 'Execute shell' marked build as failure
Archiving artifacts
Publishing Clover coverage report...
Clover xml file does not exist in: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/clover> called: clover.xml and will not be copied to: /home/hudson/hudson/jobs/Hadoop-Hdfs-0.23-Build/builds/2011-10-31_11-31-21/clover.xml
Could not find 'trunk/hadoop-hdfs-project/hadoop-hdfs/target/clover/clover.xml'.  Did you generate the XML report for Clover?
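The missing clover.xml is evidently a downstream symptom of the failure above rather than a second independent problem: with the build aborted, no Clover coverage report was generated for the publisher to copy.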
Recording test results
Publishing Javadoc
Recording fingerprints
Updating MAPREDUCE-3313
Updating MAPREDUCE-2766
Updating MAPREDUCE-3262
Updating MAPREDUCE-3274
Updating MAPREDUCE-3171
Updating MAPREDUCE-3146
Updating MAPREDUCE-2747
Updating MAPREDUCE-3240
Updating MAPREDUCE-2696