Posted to user@hbase.apache.org by Alexander Batyrshin <0x...@gmail.com> on 2019/09/16 00:02:40 UTC

ExportSnapshot to another cluster fails with: Error: java.io.FileNotFoundException: File does not exist: /hbase/archive/data/default/TABLE/...

Looks like snapshot files somehow disappear at the destination cluster

Complete stack trace:

2019-09-16 03:00:18,291 INFO  [main] mapreduce.Job: Task Id : attempt_1563449245683_0180_m_000043_2, Status : FAILED
Error: java.io.FileNotFoundException: File does not exist: /hbase/archive/data/default/TABLE/f4b9c7e6c303266b123c96d459bdfcfc/d/e1d121ce8072409f9965ab770c229fa0 (inode 10503038) Holder DFSClient_attempt_1563449245683_0180_m_000043_2_-1896170304_1 does not have any open files.
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:2800)
        at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:597)
        at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:172)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2679)
        at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)
        at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:422)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121)
        at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88)
        at org.apache.hadoop.hdfs.DFSOutputStream.addBlock(DFSOutputStream.java:1081)
        at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1865)
        at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1668)
        at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:716)
Caused by: org.apache.hadoop.ipc.RemoteException(java.io.FileNotFoundException): File does not exist: /hbase/archive/data/default/TABLE/f4b9c7e6c303266b123c96d459bdfcfc/d/e1d121ce8072409f9965ab770c229fa0 (inode 10503038) Holder DFSClient_attempt_1563449245683_0180_m_000043_2_-1896170304_1 does not have any open files.
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:2800)
        at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:597)
        at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:172)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2679)
        at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)
        at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:422)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)

        at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1497)
        at org.apache.hadoop.ipc.Client.call(Client.java:1443)
        at org.apache.hadoop.ipc.Client.call(Client.java:1353)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
        at com.sun.proxy.$Proxy17.addBlock(Unknown Source)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:510)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
        at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
        at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
        at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
        at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
        at com.sun.proxy.$Proxy18.addBlock(Unknown Source)
        at org.apache.hadoop.hdfs.DFSOutputStream.addBlock(DFSOutputStream.java:1078)
        ... 3 more

Re: ExportSnapshot to another cluster fails with: Error: java.io.FileNotFoundException: File does not exist: /hbase/archive/data/default/TABLE/...

Posted by Alexander Batyrshin <0x...@gmail.com>.
Any ideas on how to export the snapshot?
Looks like -mappers 1 fixes this issue, but I can’t confirm it 100% because it takes too long
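
For reference, a single-mapper retry along those lines might look like the sketch below. The host, snapshot name, and paths are the ones quoted below in this thread; -mappers and -overwrite are standard ExportSnapshot options, but treat the exact invocation as an illustration rather than a verified fix:

    sudo -u hadoop /opt/hbase/bin/hbase org.apache.hadoop.hbase.snapshot.ExportSnapshot \
        -snapshot snapshot-TABLE \
        -copy-to hdfs://hbase-cluster-2.datahouse/hbase \
        -overwrite \
        -mappers 1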

> On 16 Sep 2019, at 11:45, Alexander Batyrshin <0x...@gmail.com> wrote:
> 
> HBase version 1.4.10
> 
> Export command: sudo -u hadoop /opt/hbase/bin/hbase  org.apache.hadoop.hbase.snapshot.ExportSnapshot -snapshot snapshot-TABLE -copy-to hdfs://hbase-cluster-2.datahouse/hbase
> 
>> On 16 Sep 2019, at 11:32, Alexander Batyrshin <0x...@gmail.com> wrote:
>> 
>> I can’t find any logs from the HBase Master cleaner about these archive files being deleted.
>> 
>> 
>>> On 16 Sep 2019, at 03:02, Alexander Batyrshin <0x...@gmail.com> wrote:
>>> 
>>> Looks like snapshot files somehow disappear at the destination cluster
>>> 
>>> Complete stack trace:
>>> 
>>> 2019-09-16 03:00:18,291 INFO  [main] mapreduce.Job: Task Id : attempt_1563449245683_0180_m_000043_2, Status : FAILED
>>> Error: java.io.FileNotFoundException: File does not exist: /hbase/archive/data/default/TABLE/f4b9c7e6c303266b123c96d459bdfcfc/d/e1d121ce8072409f9965ab770c229fa0 (inode 10503038) Holder DFSClient_attempt_1563449245683_0180_m_000043_2_-1896170304_1 does not have any open files.
>>>      at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:2800)
>>>      at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:597)
>>>      at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:172)
>>>      at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2679)
>>>      at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)
>>>      at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)
>>>      at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>>>      at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
>>>      at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
>>>      at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
>>>      at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
>>>      at java.security.AccessController.doPrivileged(Native Method)
>>>      at javax.security.auth.Subject.doAs(Subject.java:422)
>>>      at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
>>>      at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)
>>>      at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>>>      at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>>>      at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>>>      at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
>>>      at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121)
>>>      at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88)
>>>      at org.apache.hadoop.hdfs.DFSOutputStream.addBlock(DFSOutputStream.java:1081)
>>>      at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1865)
>>>      at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1668)
>>>      at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:716)
>>> Caused by: org.apache.hadoop.ipc.RemoteException(java.io.FileNotFoundException): File does not exist: /hbase/archive/data/default/TABLE/f4b9c7e6c303266b123c96d459bdfcfc/d/e1d121ce8072409f9965ab770c229fa0 (inode 10503038) Holder DFSClient_attempt_1563449245683_0180_m_000043_2_-1896170304_1 does not have any open files.
>>>      at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:2800)
>>>      at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:597)
>>>      at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:172)
>>>      at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2679)
>>>      at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)
>>>      at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)
>>>      at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>>>      at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
>>>      at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
>>>      at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
>>>      at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
>>>      at java.security.AccessController.doPrivileged(Native Method)
>>>      at javax.security.auth.Subject.doAs(Subject.java:422)
>>>      at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
>>>      at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)
>>>      at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1497)
>>>      at org.apache.hadoop.ipc.Client.call(Client.java:1443)
>>>      at org.apache.hadoop.ipc.Client.call(Client.java:1353)
>>>      at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228)
>>>      at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
>>>      at com.sun.proxy.$Proxy17.addBlock(Unknown Source)
>>>      at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:510)
>>>      at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>>>      at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>>>      at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>>>      at java.lang.reflect.Method.invoke(Method.java:498)
>>>      at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
>>>      at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
>>>      at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
>>>      at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
>>>      at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
>>>      at com.sun.proxy.$Proxy18.addBlock(Unknown Source)
>>>      at org.apache.hadoop.hdfs.DFSOutputStream.addBlock(DFSOutputStream.java:1078)
>>>      ... 3 more
>> 
> 


Re: ExportSnapshot to another cluster fails with: Error: java.io.FileNotFoundException: File does not exist: /hbase/archive/data/default/TABLE/...

Posted by Alexander Batyrshin <0x...@gmail.com>.
HBase version 1.4.10

Export command: sudo -u hadoop /opt/hbase/bin/hbase org.apache.hadoop.hbase.snapshot.ExportSnapshot -snapshot snapshot-TABLE -copy-to hdfs://hbase-cluster-2.datahouse/hbase
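
If it helps to see how far the export got, exported snapshot metadata normally ends up under .hbase-snapshot in the destination HBase root directory, with the data files under archive. A rough sanity check on the destination (assuming the default layout and that the hadoop user can read it) could be:

    sudo -u hadoop hdfs dfs -ls hdfs://hbase-cluster-2.datahouse/hbase/.hbase-snapshot/
    sudo -u hadoop hdfs dfs -ls hdfs://hbase-cluster-2.datahouse/hbase/archive/data/default/TABLE/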

> On 16 Sep 2019, at 11:32, Alexander Batyrshin <0x...@gmail.com> wrote:
> 
> I can’t find any logs from the HBase Master cleaner about these archive files being deleted.
> 
> 
>> On 16 Sep 2019, at 03:02, Alexander Batyrshin <0x...@gmail.com> wrote:
>> 
>> Looks like snapshot files somehow disappear at the destination cluster
>> 
>> Complete stack trace:
>> 
>> 2019-09-16 03:00:18,291 INFO  [main] mapreduce.Job: Task Id : attempt_1563449245683_0180_m_000043_2, Status : FAILED
>> Error: java.io.FileNotFoundException: File does not exist: /hbase/archive/data/default/TABLE/f4b9c7e6c303266b123c96d459bdfcfc/d/e1d121ce8072409f9965ab770c229fa0 (inode 10503038) Holder DFSClient_attempt_1563449245683_0180_m_000043_2_-1896170304_1 does not have any open files.
>>       at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:2800)
>>       at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:597)
>>       at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:172)
>>       at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2679)
>>       at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)
>>       at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)
>>       at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>>       at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
>>       at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
>>       at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
>>       at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
>>       at java.security.AccessController.doPrivileged(Native Method)
>>       at javax.security.auth.Subject.doAs(Subject.java:422)
>>       at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
>>       at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)
>>       at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>>       at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>>       at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>>       at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
>>       at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121)
>>       at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88)
>>       at org.apache.hadoop.hdfs.DFSOutputStream.addBlock(DFSOutputStream.java:1081)
>>       at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1865)
>>       at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1668)
>>       at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:716)
>> Caused by: org.apache.hadoop.ipc.RemoteException(java.io.FileNotFoundException): File does not exist: /hbase/archive/data/default/TABLE/f4b9c7e6c303266b123c96d459bdfcfc/d/e1d121ce8072409f9965ab770c229fa0 (inode 10503038) Holder DFSClient_attempt_1563449245683_0180_m_000043_2_-1896170304_1 does not have any open files.
>>       at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:2800)
>>       at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:597)
>>       at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:172)
>>       at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2679)
>>       at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)
>>       at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)
>>       at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>>       at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
>>       at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
>>       at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
>>       at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
>>       at java.security.AccessController.doPrivileged(Native Method)
>>       at javax.security.auth.Subject.doAs(Subject.java:422)
>>       at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
>>       at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)
>>       at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1497)
>>       at org.apache.hadoop.ipc.Client.call(Client.java:1443)
>>       at org.apache.hadoop.ipc.Client.call(Client.java:1353)
>>       at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228)
>>       at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
>>       at com.sun.proxy.$Proxy17.addBlock(Unknown Source)
>>       at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:510)
>>       at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>>       at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>>       at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>>       at java.lang.reflect.Method.invoke(Method.java:498)
>>       at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
>>       at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
>>       at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
>>       at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
>>       at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
>>       at com.sun.proxy.$Proxy18.addBlock(Unknown Source)
>>       at org.apache.hadoop.hdfs.DFSOutputStream.addBlock(DFSOutputStream.java:1078)
>>       ... 3 more
> 


Re: ExportSnapshot to another cluster fails with: Error: java.io.FileNotFoundException: File does not exist: /hbase/archive/data/default/TABLE/...

Posted by Alexander Batyrshin <0x...@gmail.com>.
I can’t find any logs from the HBase Master cleaner about these archive files being deleted.
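
For what it's worth, the archive cleanup on the master is done by HFileCleaner inside the CleanerChore, so a rough grep of the destination master log for the file from the stack trace would look something like the line below (the log path is an assumption, adjust to your install; depending on log level, per-file deletions may only show up at DEBUG):

    grep -iE 'HFileCleaner|CleanerChore|e1d121ce8072409f9965ab770c229fa0' /var/log/hbase/hbase-*-master-*.log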


> On 16 Sep 2019, at 03:02, Alexander Batyrshin <0x...@gmail.com> wrote:
> 
> Looks like snapshot files somehow disappear at the destination cluster
> 
> Complete stack trace:
> 
> 2019-09-16 03:00:18,291 INFO  [main] mapreduce.Job: Task Id : attempt_1563449245683_0180_m_000043_2, Status : FAILED
> Error: java.io.FileNotFoundException: File does not exist: /hbase/archive/data/default/TABLE/f4b9c7e6c303266b123c96d459bdfcfc/d/e1d121ce8072409f9965ab770c229fa0 (inode 10503038) Holder DFSClient_attempt_1563449245683_0180_m_000043_2_-1896170304_1 does not have any open files.
>        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:2800)
>        at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:597)
>        at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:172)
>        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2679)
>        at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)
>        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)
>        at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>        at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
>        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
>        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
>        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
>        at java.security.AccessController.doPrivileged(Native Method)
>        at javax.security.auth.Subject.doAs(Subject.java:422)
>        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
>        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)
>        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
>        at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121)
>        at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88)
>        at org.apache.hadoop.hdfs.DFSOutputStream.addBlock(DFSOutputStream.java:1081)
>        at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1865)
>        at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1668)
>        at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:716)
> Caused by: org.apache.hadoop.ipc.RemoteException(java.io.FileNotFoundException): File does not exist: /hbase/archive/data/default/TABLE/f4b9c7e6c303266b123c96d459bdfcfc/d/e1d121ce8072409f9965ab770c229fa0 (inode 10503038) Holder DFSClient_attempt_1563449245683_0180_m_000043_2_-1896170304_1 does not have any open files.
>        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:2800)
>        at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:597)
>        at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:172)
>        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2679)
>        at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)
>        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)
>        at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>        at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
>        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
>        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
>        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
>        at java.security.AccessController.doPrivileged(Native Method)
>        at javax.security.auth.Subject.doAs(Subject.java:422)
>        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
>        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)
>        at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1497)
>        at org.apache.hadoop.ipc.Client.call(Client.java:1443)
>        at org.apache.hadoop.ipc.Client.call(Client.java:1353)
>        at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228)
>        at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
>        at com.sun.proxy.$Proxy17.addBlock(Unknown Source)
>        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:510)
>        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>        at java.lang.reflect.Method.invoke(Method.java:498)
>        at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
>        at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
>        at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
>        at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
>        at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
>        at com.sun.proxy.$Proxy18.addBlock(Unknown Source)
>        at org.apache.hadoop.hdfs.DFSOutputStream.addBlock(DFSOutputStream.java:1078)
>        ... 3 more


Re: ExportSnapshot to another cluster fails with: Error: java.io.FileNotFoundException: File does not exist: /hbase/archive/data/default/TABLE/...

Posted by Alexander Batyrshin <0x...@gmail.com>.
I’ve checked the logs on the HBase Master and didn’t find anything about these files being cleaned up
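
A quick way to check whether the file the mapper was writing is still present on the destination (or was removed out from under it) is to stat the path from the stack trace directly; just a sketch, run from a node that can reach the destination NameNode:

    sudo -u hadoop hdfs dfs -stat 'size=%b modified=%y' \
        hdfs://hbase-cluster-2.datahouse/hbase/archive/data/default/TABLE/f4b9c7e6c303266b123c96d459bdfcfc/d/e1d121ce8072409f9965ab770c229fa0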


> On 17 Sep 2019, at 02:05, 张铎(Duo Zhang) <pa...@gmail.com> wrote:
> 
> Try disabling the hfile cleaner at the destination cluster when exporting
> the snapshot?
> 
> Alexander Batyrshin <0x...@gmail.com> wrote on Mon, 16 Sep 2019 at 08:02:
> 
>> Looks like snapshot files somehow disappear at the destination cluster
>> 
>> Complete stack trace:
>> 
>> 2019-09-16 03:00:18,291 INFO  [main] mapreduce.Job: Task Id :
>> attempt_1563449245683_0180_m_000043_2, Status : FAILED
>> Error: java.io.FileNotFoundException: File does not exist:
>> /hbase/archive/data/default/TABLE/f4b9c7e6c303266b123c96d459bdfcfc/d/e1d121ce8072409f9965ab770c229fa0
>> (inode 10503038) Holder
>> DFSClient_attempt_1563449245683_0180_m_000043_2_-1896170304_1 does not have
>> any open files.
>>        at
>> org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:2800)
>>        at
>> org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:597)
>>        at
>> org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:172)
>>        at
>> org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2679)
>>        at
>> org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)
>>        at
>> org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)
>>        at
>> org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>>        at
>> org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
>>        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
>>        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
>>        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
>>        at java.security.AccessController.doPrivileged(Native Method)
>>        at javax.security.auth.Subject.doAs(Subject.java:422)
>>        at
>> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
>>        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)
>>        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
>> Method)
>>        at
>> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>>        at
>> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>>        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
>>        at
>> org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121)
>>        at
>> org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88)
>>        at
>> org.apache.hadoop.hdfs.DFSOutputStream.addBlock(DFSOutputStream.java:1081)
>>        at
>> org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1865)
>>        at
>> org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1668)
>>        at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:716)
>> Caused by: org.apache.hadoop.ipc.RemoteException(java.io.FileNotFoundException):
>> File does not exist:
>> /hbase/archive/data/default/TABLE/f4b9c7e6c303266b123c96d459bdfcfc/d/e1d121ce8072409f9965ab770c229fa0
>> (inode 10503038) Holder
>> DFSClient_attempt_1563449245683_0180_m_000043_2_-1896170304_1 does not have
>> any open files.
>>        at
>> org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:2800)
>>        at
>> org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:597)
>>        at
>> org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:172)
>>        at
>> org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2679)
>>        at
>> org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)
>>        at
>> org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)
>>        at
>> org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>>        at
>> org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
>>        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
>>        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
>>        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
>>        at java.security.AccessController.doPrivileged(Native Method)
>>        at javax.security.auth.Subject.doAs(Subject.java:422)
>>        at
>> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
>>        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)
>> 
>>        at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1497)
>>        at org.apache.hadoop.ipc.Client.call(Client.java:1443)
>>        at org.apache.hadoop.ipc.Client.call(Client.java:1353)
>>        at
>> org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228)
>>        at
>> org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
>>        at com.sun.proxy.$Proxy17.addBlock(Unknown Source)
>>        at
>> org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:510)
>>        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>>        at
>> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>>        at
>> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>>        at java.lang.reflect.Method.invoke(Method.java:498)
>>        at
>> org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
>>        at
>> org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
>>        at
>> org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
>>        at
>> org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
>>        at
>> org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
>>        at com.sun.proxy.$Proxy18.addBlock(Unknown Source)
>>        at
>> org.apache.hadoop.hdfs.DFSOutputStream.addBlock(DFSOutputStream.java:1078)
>>        ... 3 more


Re: ExportSnapshot to another cluster fails with: Error: java.io.FileNotFoundException: File does not exist: /hbase/archive/data/default/TABLE/...

Posted by "张铎 (Duo Zhang)" <pa...@gmail.com>.
Try disabling the hfile cleaner at the destination cluster when exporting
the snapshot?
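
If the destination master can be restarted, one config-based way to approximate that (short of removing the cleaner plugins entirely) is to raise the TTL the time-to-live HFile cleaner honours, so archived files survive long enough for the copy. A minimal property sketch for the destination master's hbase-site.xml; the property name is the standard hbase.master.hfilecleaner.ttl, the 24-hour value is just an example:

    <property>
      <!-- keep files under /hbase/archive for 24h (milliseconds; default is 5 minutes) -->
      <name>hbase.master.hfilecleaner.ttl</name>
      <value>86400000</value>
    </property>

Newer releases also have a cleaner_chore_switch command in the hbase shell to pause the chore on the fly, but I am not certain it is available on 1.4.10.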

Alexander Batyrshin <0x...@gmail.com> wrote on Mon, 16 Sep 2019 at 08:02:

> Looks like snapshot files somehow disappear at the destination cluster
>
> Complete stack trace:
>
> 2019-09-16 03:00:18,291 INFO  [main] mapreduce.Job: Task Id :
> attempt_1563449245683_0180_m_000043_2, Status : FAILED
> Error: java.io.FileNotFoundException: File does not exist:
> /hbase/archive/data/default/TABLE/f4b9c7e6c303266b123c96d459bdfcfc/d/e1d121ce8072409f9965ab770c229fa0
> (inode 10503038) Holder
> DFSClient_attempt_1563449245683_0180_m_000043_2_-1896170304_1 does not have
> any open files.
>         at
> org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:2800)
>         at
> org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:597)
>         at
> org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:172)
>         at
> org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2679)
>         at
> org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)
>         at
> org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)
>         at
> org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>         at
> org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
>         at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
>         at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
>         at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
>         at java.security.AccessController.doPrivileged(Native Method)
>         at javax.security.auth.Subject.doAs(Subject.java:422)
>         at
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
>         at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> Method)
>         at
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>         at
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>         at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
>         at
> org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121)
>         at
> org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88)
>         at
> org.apache.hadoop.hdfs.DFSOutputStream.addBlock(DFSOutputStream.java:1081)
>         at
> org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1865)
>         at
> org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1668)
>         at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:716)
> Caused by: org.apache.hadoop.ipc.RemoteException(java.io.FileNotFoundException):
> File does not exist:
> /hbase/archive/data/default/TABLE/f4b9c7e6c303266b123c96d459bdfcfc/d/e1d121ce8072409f9965ab770c229fa0
> (inode 10503038) Holder
> DFSClient_attempt_1563449245683_0180_m_000043_2_-1896170304_1 does not have
> any open files.
>         at
> org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:2800)
>         at
> org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:597)
>         at
> org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:172)
>         at
> org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2679)
>         at
> org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)
>         at
> org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)
>         at
> org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>         at
> org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)
>         at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)
>         at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)
>         at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)
>         at java.security.AccessController.doPrivileged(Native Method)
>         at javax.security.auth.Subject.doAs(Subject.java:422)
>         at
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)
>         at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)
>
>         at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1497)
>         at org.apache.hadoop.ipc.Client.call(Client.java:1443)
>         at org.apache.hadoop.ipc.Client.call(Client.java:1353)
>         at
> org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228)
>         at
> org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
>         at com.sun.proxy.$Proxy17.addBlock(Unknown Source)
>         at
> org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:510)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>         at
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:498)
>         at
> org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
>         at
> org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
>         at
> org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
>         at
> org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
>         at
> org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
>         at com.sun.proxy.$Proxy18.addBlock(Unknown Source)
>         at
> org.apache.hadoop.hdfs.DFSOutputStream.addBlock(DFSOutputStream.java:1078)
>         ... 3 more