You are viewing a plain text version of this content. The canonical link for it is here.
Posted to hdfs-dev@hadoop.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2012/01/17 13:51:59 UTC

Build failed in Jenkins: Hadoop-Hdfs-0.23-Build #141

See <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/141/changes>

Changes:

[acmurthy] Merge -c 1232322 from trunk to branch-0.23 to fix HADOOP-7971. Adding back job/pipes/queue commands to bin/hadoop for backward compatibility.

[acmurthy] Merge -c 1232314 from trunk to branch-0.23 to fix MAPREDUCE-2450. Fixed a corner case with interrupted communication threads leading to a long timeout in Task.

[eli] HDFS-362. svn merge -c 1171945 from trunk

[vinodkv] MAPREDUCE-3641. Making CapacityScheduler more conservative so as to assign only one off-switch container in a single scheduling iteration. Contributed by Arun C Murthy.
svn merge --ignore-ancestry -c 1232182 ../../trunk/

[mahadev] MAPREDUCE-3657. State machine visualize build fails. (Jason Lowe via mahadev) - Merging r1232167 from trunk.

[mahadev] MAPREDUCE-3649. Job End notification gives an error on calling back. (Ravi Prakash via mahadev) - Merging r1232126 from trunk.

[jitendra] Merged r1230708 from trunk for MAPREDUCE-3664.

------------------------------------------
[...truncated 13899 lines...]
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//class-use/HttpFSServerWebApp.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//class-use/HttpFSServer.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//class-use/HttpFSReleaseFilter.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//class-use/FSOperations.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//class-use/FSOperations.FSAppend.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//class-use/FSOperations.FSContentSummary.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//class-use/FSOperations.FSCreate.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//class-use/FSOperations.FSDelete.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//class-use/FSOperations.FSFileChecksum.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//class-use/FSOperations.FSFileStatus.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//class-use/FSOperations.FSHomeDir.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//class-use/FSOperations.FSListStatus.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//class-use/FSOperations.FSMkdirs.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//class-use/FSOperations.FSOpen.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//class-use/FSOperations.FSRename.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//class-use/FSOperations.FSSetOwner.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//class-use/FSOperations.FSSetPermission.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//class-use/FSOperations.FSSetReplication.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//class-use/FSOperations.FSSetTimes.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//class-use/HttpFSExceptionProvider.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/client//class-use/HttpPseudoAuthenticator.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/client//class-use/HttpFSFileSystem.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/client//class-use/HttpFSFileSystem.FILE_TYPE.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/client//class-use/HttpFSFileSystem.GetOpValues.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/client//class-use/HttpFSFileSystem.PostOpValues.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/client//class-use/HttpFSFileSystem.PutOpValues.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/client//class-use/HttpFSFileSystem.DeleteOpValues.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/client//class-use/HttpKerberosAuthenticator.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/server//class-use/ServiceException.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/server//class-use/Server.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/server//class-use/Server.Status.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/server//class-use/BaseService.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/server//class-use/ServerException.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/server//class-use/ServerException.ERROR.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/server//class-use/Service.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/wsrs//class-use/BooleanParam.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/wsrs//class-use/JSONMapProvider.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/wsrs//class-use/Param.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/wsrs//class-use/JSONProvider.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/wsrs//class-use/ByteParam.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/wsrs//class-use/UserProvider.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/wsrs//class-use/LongParam.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/wsrs//class-use/ShortParam.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/wsrs//class-use/ExceptionProvider.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/wsrs//class-use/IntegerParam.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/wsrs//class-use/StringParam.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/wsrs//class-use/EnumParam.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/wsrs//class-use/InputStreamEntity.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service//class-use/Scheduler.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service//class-use/ProxyUser.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service//class-use/FileSystemAccess.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service//class-use/FileSystemAccess.FileSystemExecutor.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service//class-use/FileSystemAccessException.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service//class-use/FileSystemAccessException.ERROR.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service//class-use/Instrumentation.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service//class-use/Instrumentation.Cron.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service//class-use/Instrumentation.Variable.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service//class-use/Groups.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service/security//class-use/GroupsService.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service/security//class-use/ProxyUserService.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service/security//class-use/ProxyUserService.ERROR.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service/scheduler//class-use/SchedulerService.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service/hadoop//class-use/FileSystemAccessService.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service/instrumentation//class-use/InstrumentationService.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/lang//class-use/RunnableCallable.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/lang//class-use/XException.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/lang//class-use/XException.ERROR.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/servlet//class-use/HostnameFilter.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/servlet//class-use/ServerWebApp.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/servlet//class-use/FileSystemReleaseFilter.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/servlet//class-use/MDCFilter.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/util//class-use/Check.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/util//class-use/ConfigurationUtils.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/client//package-use.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/fs/http/server//package-use.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/lang//package-use.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/server//package-use.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service//package-use.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service/hadoop//package-use.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service/instrumentation//package-use.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service/scheduler//package-use.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/service/security//package-use.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/servlet//package-use.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/util//package-use.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/org/apache/hadoop/lib/wsrs//package-use.html...>
Building index for all the packages and classes...
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/overview-tree.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/index-all.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/deprecated-list.html...>
Building index for all classes...
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/allclasses-frame.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/allclasses-noframe.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/index.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/overview-summary.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/help-doc.html...>
Generating <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/apidocs/stylesheet.css...>
23 warnings
[WARNING] Javadoc Warnings
[WARNING] org/apache/hadoop/fs/FileSystem.class(org/apache/hadoop/fs:FileSystem.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate': class file for org.apache.hadoop.classification.InterfaceAudience not found
[WARNING] org/apache/hadoop/fs/FileSystem.class(org/apache/hadoop/fs:FileSystem.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate'
[WARNING] org/apache/hadoop/fs/FileSystem.class(org/apache/hadoop/fs:FileSystem.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate'
[WARNING] org/apache/hadoop/fs/Path.class(org/apache/hadoop/fs:Path.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate'
[WARNING] org/apache/hadoop/fs/FSDataInputStream.class(org/apache/hadoop/fs:FSDataInputStream.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate'
[WARNING] org/apache/hadoop/fs/FSDataOutputStream.class(org/apache/hadoop/fs:FSDataOutputStream.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate'
[WARNING] org/apache/hadoop/security/UserGroupInformation.class(org/apache/hadoop/security:UserGroupInformation.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate'
[WARNING] org/apache/hadoop/security/UserGroupInformation.class(org/apache/hadoop/security:UserGroupInformation.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate'
[WARNING] org/apache/hadoop/security/UserGroupInformation.class(org/apache/hadoop/security:UserGroupInformation.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate'
[WARNING] org/apache/hadoop/security/Groups.class(org/apache/hadoop/security:Groups.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate'
[WARNING] <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java>:241: warning - @param argument "override," is not a parameter name.
[WARNING] <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java>:450: warning - @param argument "override," is not a parameter name.
[WARNING] org/apache/hadoop/fs/UnresolvedLinkException.class(org/apache/hadoop/fs:UnresolvedLinkException.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate'
[WARNING] org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.class(org/apache/hadoop/fs:MD5MD5CRC32FileChecksum.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate'
[WARNING] org/apache/hadoop/fs/LocalDirAllocator.class(org/apache/hadoop/fs:LocalDirAllocator.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate'
[WARNING] org/apache/hadoop/fs/FileContext.class(org/apache/hadoop/fs:FileContext.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate'
[WARNING] org/apache/hadoop/fs/FileContext.class(org/apache/hadoop/fs:FileContext.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate'
[WARNING] org/apache/hadoop/fs/FSOutputSummer.class(org/apache/hadoop/fs:FSOutputSummer.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate'
[WARNING] org/apache/hadoop/fs/FSInputStream.class(org/apache/hadoop/fs:FSInputStream.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate'
[WARNING] org/apache/hadoop/fs/FSInputChecker.class(org/apache/hadoop/fs:FSInputChecker.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate'
[WARNING] org/apache/hadoop/fs/DU.class(org/apache/hadoop/fs:DU.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate'
[WARNING] org/apache/hadoop/fs/DF.class(org/apache/hadoop/fs:DF.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate'
[WARNING] org/apache/hadoop/fs/AbstractFileSystem.class(org/apache/hadoop/fs:AbstractFileSystem.class): warning: Cannot find annotation method 'value()' in type 'org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate'
[INFO] Building jar: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-0.23.1-SNAPSHOT-javadoc.jar>
[WARNING] Artifact org.apache.hadoop:hadoop-hdfs-httpfs:javadoc:javadoc:0.23.1-SNAPSHOT already attached to project, ignoring duplicate
[INFO] 
[INFO] --- maven-source-plugin:2.1.2:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-httpfs ---
[WARNING] Artifact org.apache.hadoop:hadoop-hdfs-httpfs:java-source:sources:0.23.1-SNAPSHOT already attached to project, ignoring duplicate
[INFO] 
[INFO] --- maven-site-plugin:3.0:attach-descriptor (attach-descriptor) @ hadoop-hdfs-httpfs ---
[INFO] 
[INFO] --- maven-assembly-plugin:2.2.1:single (dist) @ hadoop-hdfs-httpfs ---
[INFO] Copying files to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-0.23.1-SNAPSHOT>
[INFO] 
[INFO] --- maven-antrun-plugin:1.6:run (dist) @ hadoop-hdfs-httpfs ---
[INFO] Executing tasks

main:
      [get] Destination already exists (skipping): <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/downloads/tomcat.tar.gz>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/tomcat.exp>
     [move] Moving 491 files to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-0.23.1-SNAPSHOT/share/hadoop/httpfs>
   [delete] Deleting directory <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/tomcat.exp>
   [delete] Deleting directory <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-0.23.1-SNAPSHOT/share/hadoop/httpfs/tomcat/webapps>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-0.23.1-SNAPSHOT/share/hadoop/httpfs/tomcat/webapps>
   [delete] Deleting: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-0.23.1-SNAPSHOT/share/hadoop/httpfs/tomcat/conf/server.xml>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-0.23.1-SNAPSHOT/share/hadoop/httpfs/tomcat/conf>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-0.23.1-SNAPSHOT/share/hadoop/httpfs/tomcat/conf>
     [copy] Copying 2 files to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-0.23.1-SNAPSHOT/share/hadoop/httpfs/tomcat/webapps/ROOT>
     [copy] Copying 167 files to <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-0.23.1-SNAPSHOT/share/hadoop/httpfs/tomcat/webapps/webhdfs>
     [copy] Copied 23 empty directories to 1 empty directory under <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-0.23.1-SNAPSHOT/share/hadoop/httpfs/tomcat/webapps/webhdfs>
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-antrun-plugin:1.6:run (tar) @ hadoop-hdfs-httpfs ---
[INFO] Executing tasks

main:
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.6:checkstyle (default-cli) @ hadoop-hdfs-httpfs ---
[INFO] 
[INFO] There are 1693 checkstyle errors.
[WARNING] Unable to locate Source XRef to link to - DISABLED
[INFO] 
[INFO] --- findbugs-maven-plugin:2.3.2:findbugs (default-cli) @ hadoop-hdfs-httpfs ---
[INFO] ****** FindBugsMojo execute *******
[INFO] canGenerate is true
[INFO] ****** FindBugsMojo executeFindbugs *******
[INFO] Temp File is <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/findbugsTemp.xml>
[INFO] Fork Value is true
     [java] The following classes needed for analysis were missing:
     [java]   org.apache.hadoop.classification.InterfaceAudience$Private
     [java]   org.apache.hadoop.classification.InterfaceStability$Stable
     [java]   org.apache.hadoop.classification.InterfaceAudience
     [java]   org.apache.hadoop.classification.InterfaceStability$Evolving
     [java]   org.apache.hadoop.classification.InterfaceStability
     [java]   org.apache.hadoop.classification.InterfaceAudience$Public
     [java]   org.apache.hadoop.classification.InterfaceAudience$LimitedPrivate
     [java] Missing classes: 7
[INFO] xmlOutput is false
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Building Apache Hadoop HDFS Project 0.23.1-SNAPSHOT
[INFO] ------------------------------------------------------------------------
[INFO] 
[INFO] --- maven-clean-plugin:2.4.1:clean (default-clean) @ hadoop-hdfs-project ---
[INFO] Deleting <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/target>
[INFO] 
[INFO] --- maven-antrun-plugin:1.6:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/target/test-dir>
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-javadoc-plugin:2.7:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Not executing Javadoc as the project is not a Java classpath-capable package
[INFO] 
[INFO] --- maven-source-plugin:2.1.2:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.0:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-install-plugin:2.3.1:install (default-install) @ hadoop-hdfs-project ---
[INFO] Installing <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/pom.xml> to /home/jenkins/.m2/repository/org/apache/hadoop/hadoop-hdfs-project/0.23.1-SNAPSHOT/hadoop-hdfs-project-0.23.1-SNAPSHOT.pom
[INFO] 
[INFO] --- maven-antrun-plugin:1.6:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-javadoc-plugin:2.7:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Not executing Javadoc as the project is not a Java classpath-capable package
[INFO] 
[INFO] --- maven-source-plugin:2.1.2:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.0:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.6:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:2.3.2:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ****** FindBugsMojo execute *******
[INFO] canGenerate is false
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS ................................ SUCCESS [5:47.118s]
[INFO] Apache Hadoop HttpFS .............................. SUCCESS [43.918s]
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [0.059s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD SUCCESS
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 6:31.538s
[INFO] Finished at: Tue Jan 17 11:40:33 UTC 2012
[INFO] Final Memory: 71M/791M
[INFO] ------------------------------------------------------------------------
+ /home/jenkins/tools/maven/latest/bin/mvn test -Dmaven.test.failure.ignore=true -Pclover -DcloverLicenseLocation=/home/jenkins/tools/clover/latest/lib/clover.license
Build step 'Execute shell' marked build as failure
Archiving artifacts
Publishing Clover coverage report...
Clover xml file does not exist in: <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/target/clover> called: clover.xml and will not be copied to: /home/hudson/hudson/jobs/Hadoop-Hdfs-0.23-Build/builds/2012-01-17_11-30-46/clover.xml
Could not find 'trunk/hadoop-hdfs-project/hadoop-hdfs/target/clover/clover.xml'.  Did you generate the XML report for Clover?
Recording test results
Publishing Javadoc
Recording fingerprints
Updating MAPREDUCE-3657
Updating HADOOP-7971
Updating MAPREDUCE-3641
Updating HDFS-362
Updating MAPREDUCE-2450
Updating MAPREDUCE-3664
Updating MAPREDUCE-3649


Jenkins build is back to stable : Hadoop-Hdfs-0.23-Build #143

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/143/changes>



Jenkins build is unstable: Hadoop-Hdfs-0.23-Build #142

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/142/changes>



Hadoop-Hdfs-0.23-Build - Build # 142 - Still unstable

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See https://builds.apache.org/job/Hadoop-Hdfs-0.23-Build/142/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 14093 lines...]
main:
    [mkdir] Created dir: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/target/test-dir
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-javadoc-plugin:2.7:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Not executing Javadoc as the project is not a Java classpath-capable package
[INFO] 
[INFO] --- maven-source-plugin:2.1.2:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.0:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-install-plugin:2.3.1:install (default-install) @ hadoop-hdfs-project ---
[INFO] Installing /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-0.23-Build/trunk/hadoop-hdfs-project/pom.xml to /home/jenkins/.m2/repository/org/apache/hadoop/hadoop-hdfs-project/0.23.1-SNAPSHOT/hadoop-hdfs-project-0.23.1-SNAPSHOT.pom
[INFO] 
[INFO] --- maven-antrun-plugin:1.6:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-javadoc-plugin:2.7:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Not executing Javadoc as the project is not a Java classpath-capable package
[INFO] 
[INFO] --- maven-source-plugin:2.1.2:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.0:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.6:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:2.3.2:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ****** FindBugsMojo execute *******
[INFO] canGenerate is false
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS ................................ SUCCESS [5:40.587s]
[INFO] Apache Hadoop HttpFS .............................. SUCCESS [41.855s]
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [0.061s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD SUCCESS
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 6:22.944s
[INFO] Finished at: Wed Jan 18 11:41:37 UTC 2012
[INFO] Final Memory: 72M/748M
[INFO] ------------------------------------------------------------------------
+ /home/jenkins/tools/maven/latest/bin/mvn test -Dmaven.test.failure.ignore=true -Pclover -DcloverLicenseLocation=/home/jenkins/tools/clover/latest/lib/clover.license
Archiving artifacts
Publishing Clover coverage report...
Publishing Clover HTML report...
Publishing Clover XML report...
Publishing Clover coverage results...
Recording test results
Build step 'Publish JUnit test result report' changed build result to UNSTABLE
Publishing Javadoc
Recording fingerprints
Updating MAPREDUCE-3669
Sending e-mails to: hdfs-dev@hadoop.apache.org
Email was triggered for: Unstable
Sending email for trigger: Unstable



###################################################################################
############################## FAILED TESTS (if any) ##############################
1 tests failed.
FAILED:  org.apache.hadoop.hdfs.TestLeaseRecovery2.testHardLeaseRecoveryWithRenameAfterNameNodeRestart

Error Message:
No lease on /hardLeaseRecovery File does not exist. Holder DFSClient_NONMAPREDUCE_-1411608839_1 does not have any open files.  at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:1612)  at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:1603)  at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:1541)  at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:420)  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)  at java.lang.reflect.Method.invoke(Method.java:597)  at org.apache.hadoop.ipc.WritableRpcEngine$Server.call(WritableRpcEngine.java:365)  at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1493)  at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1489)  at java.security.AccessController.doPrivileged(Native Method)  at javax.security.auth.Subject.doAs(Subject.java:396)  at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1157)  at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1487) 

Stack Trace:
org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException: No lease on /hardLeaseRecovery File does not exist. Holder DFSClient_NONMAPREDUCE_-1411608839_1 does not have any open files.
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:1612)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:1603)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalDatanode(FSNamesystem.java:1541)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getAdditionalDatanode(NameNodeRpcServer.java:420)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.hadoop.ipc.WritableRpcEngine$Server.call(WritableRpcEngine.java:365)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1493)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1489)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1157)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1487)

	at org.apache.hadoop.ipc.Client.call(Client.java:1086)
	at org.apache.hadoop.ipc.WritableRpcEngine$Invoker.invoke(WritableRpcEngine.java:193)
	at $Proxy15.getAdditionalDatanode(Unknown Source)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:100)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:65)
	at $Proxy15.getAdditionalDatanode(Unknown Source)
	at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.addDatanode2ExistingPipeline(DFSOutputStream.java:828)
	at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.setupPipelineForAppendOrRecovery(DFSOutputStream.java:930)
	at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.processDatanodeError(DFSOutputStream.java:741)
	at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:416)