Posted to issues@nifi.apache.org by "Joe Witt (Jira)" <ji...@apache.org> on 2021/03/17 16:08:00 UTC

[jira] [Updated] (NIFI-8331) PutHDFS sometimes complains "File does not exist" and loses files

     [ https://issues.apache.org/jira/browse/NIFI-8331?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Joe Witt updated NIFI-8331:
---------------------------
    Priority: Critical  (was: Major)

> PutHDFS sometimes complains "File does not exist" and loses files
> ------------------------------------------------------------------
>
>                 Key: NIFI-8331
>                 URL: https://issues.apache.org/jira/browse/NIFI-8331
>             Project: Apache NiFi
>          Issue Type: Bug
>          Components: Extensions
>    Affects Versions: 1.13.1
>         Environment: nifi 1.13.1
> Linux hb3-prod-gem-bnpmp-001 3.10.0-1160.15.2.el7.x86_64 #1 SMP Wed Feb 3 15:06:38 UTC 2021 x86_64 x86_64 x86_64 GNU/Linux
> openjdk version "1.8.0_282"
> OpenJDK Runtime Environment (build 1.8.0_282-b08)
> OpenJDK 64-Bit Server VM (build 25.282-b08, mixed mode)
>            Reporter: macdoor615
>            Priority: Critical
>             Fix For: 1.14.0
>
>
> # Upgraded to NiFi 1.13.1 from 1.13.0 and ran the same flow.xml.gz.
>  # PutHDFS sometimes complains "File does not exist" and loses the file, throwing the exception below (a sketch of this failure mode follows the list):
> {code:java}
> 2021-03-17 11:21:04,443 WARN [Thread-919036] org.apache.hadoop.hdfs.DataStreamer DataStreamer Exception
> java.io.FileNotFoundException: File does not exist: /data/ftp/BAOMIHUIYI/B200/POID-00083/.100141_B200_141_202103170800_202103170900.txt (inode 152965963) [Lease. Holder: DFSClient_NONMAPREDUCE_360664168_65, pending creates: 1]
>  at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3050)
>  at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:610)
>  at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:171)
>  at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2927)
>  at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:908)
>  at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:593)
>  at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>  at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:532)
>  at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1070)
>  at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1020)
>  at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:948)
>  at java.security.AccessController.doPrivileged(Native Method)
>  at javax.security.auth.Subject.doAs(Subject.java:422)
>  at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1845)
>  at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2952)
> at sun.reflect.GeneratedConstructorAccessor197.newInstance(Unknown Source)
>  at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>  at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
>  at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121)
>  at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88)
>  at org.apache.hadoop.hdfs.DFSOutputStream.addBlock(DFSOutputStream.java:1093)
>  at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1867)
>  at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1669)
>  at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:715)
> Caused by: org.apache.hadoop.ipc.RemoteException: File does not exist: /data/ftp/BAOMIHUIYI/B200/POID-00083/.100141_B200_141_202103170800_202103170900.txt (inode 152965963) [Lease. Holder: DFSClient_NONMAPREDUCE_360664168_65, pending creates: 1]
>  at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:3050)
>  at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:610)
>  at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:171)
>  at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2927)
>  at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:908)
>  at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:593)
>  at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>  at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:532)
>  at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1070)
>  at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1020)
>  at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:948)
>  at java.security.AccessController.doPrivileged(Native Method)
>  at javax.security.auth.Subject.doAs(Subject.java:422)
>  at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1845)
>  at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2952)
> at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1562)
>  at org.apache.hadoop.ipc.Client.call(Client.java:1508)
>  at org.apache.hadoop.ipc.Client.call(Client.java:1405)
>  at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:234)
>  at org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:119)
>  at com.sun.proxy.$Proxy215.addBlock(Unknown Source)
>  at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:530)
>  at sun.reflect.GeneratedMethodAccessor106.invoke(Unknown Source)
>  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  at java.lang.reflect.Method.invoke(Method.java:498)
>  at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
>  at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
>  at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
>  at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
>  at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
>  at com.sun.proxy.$Proxy234.addBlock(Unknown Source)
>  at org.apache.hadoop.hdfs.DFSOutputStream.addBlock(DFSOutputStream.java:1090)
>  ... 3 common frames omitted
> {code}
>  
>  # Downgraded to 1.13.0, and everything is okay again.
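
For context, and strictly as an illustration rather than a confirmed root cause for NIFI-8331: the dot-prefixed filename in the stack trace matches PutHDFS's pattern of writing incoming data to a temporary dot-file and renaming it to the final name once the write completes. The NameNode message "File does not exist ... [Lease. Holder: ...]" is what an HDFS writer sees when its in-progress file is deleted or replaced before it finishes writing. The sketch below uses only the public Hadoop FileSystem API to provoke that class of error; the class name, path, and sizes are hypothetical.

{code:java}
// Illustrative only: shows how an in-progress HDFS file can disappear
// underneath its writer and produce a "File does not exist ... [Lease.
// Holder: ...]" error like the one above. Not NiFi code; path and sizes
// are made up.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class LeaseLossSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);

        // PutHDFS-style temporary file: a dot-prefixed name that is renamed
        // to the final name only after the write completes.
        Path tempFile = new Path("/tmp/.demo_in_progress.txt");

        FSDataOutputStream out = fs.create(tempFile, true);
        out.write(new byte[4 * 1024 * 1024]);
        out.hflush(); // force the client to start streaming under its lease

        // A second writer (e.g. another thread or node handling a file with
        // the same name) deleting or overwriting the temp file removes the
        // original writer's inode on the NameNode.
        fs.delete(tempFile, false);

        // The original writer's next addBlock / completeFile call now
        // typically fails with java.io.FileNotFoundException: File does not
        // exist ... (inode ...) [Lease. Holder: DFSClient_NONMAPREDUCE_...],
        // as in the stack trace above.
        out.close();
    }
}
{code}

By itself the exception only shows that the file vanished from the NameNode while the client still held its lease; it does not say which component removed it.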



--
This message was sent by Atlassian Jira
(v8.3.4#803005)