Posted to issues@hbase.apache.org by "Jonathan Hsieh (JIRA)" <ji...@apache.org> on 2012/08/30 00:40:07 UTC

[jira] [Created] (HBASE-6691) HFile quarantine fails with missing files in hadoop 2.0

Jonathan Hsieh created HBASE-6691:
-------------------------------------

             Summary: HFile quarantine fails with missing files in hadoop 2.0
                 Key: HBASE-6691
                 URL: https://issues.apache.org/jira/browse/HBASE-6691
             Project: HBase
          Issue Type: Bug
    Affects Versions: 0.96.0
            Reporter: Jonathan Hsieh
             Fix For: 0.96.0


Trunk/0.96 has a specific issue, mentioned in HBASE-6686, when run against hadoop 2.0. This jira addresses that problem.

{code}
2012-08-29 12:55:26,031 ERROR [IPC Server handler 0 on 41070] security.UserGroupInformation(1235): PriviledgedActionException as:jon (auth:SIMPLE) cause:java.io.FileNotFoundException: File does not exist: /user/jon/hbase/testQuarantineMissingHFile/4332ea87d02d33e443550537722ff4fc/fam/befbe65ff30e4a46866f04a5671f0e44
2012-08-29 12:55:26,085 WARN  [Thread-2994] hbck.HFileCorruptionChecker(253): Failed to quaratine an HFile in regiondir hdfs://localhost:41070/user/jon/hbase/testQuarantineMissingHFile/4332ea87d02d33e443550537722ff4fc
java.lang.reflect.UndeclaredThrowableException
	at $Proxy23.getBlockLocations(Unknown Source)
	at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:882)
	at org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:152)
	at org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:119)
	at org.apache.hadoop.hdfs.DFSInputStream.<init>(DFSInputStream.java:112)
	at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:955)
	at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:212)
	at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:75)
	at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:664)
	at org.apache.hadoop.hbase.io.hfile.HFile.createReaderWithEncoding(HFile.java:575)
	at org.apache.hadoop.hbase.io.hfile.HFile.createReader(HFile.java:605)
	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker.checkHFile(HFileCorruptionChecker.java:94)
	at org.apache.hadoop.hbase.util.TestHBaseFsck$1$1.checkHFile(TestHBaseFsck.java:1401)
	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker.checkColFamDir(HFileCorruptionChecker.java:175)
	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker.checkRegionDir(HFileCorruptionChecker.java:208)
	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker$RegionDirChecker.call(HFileCorruptionChecker.java:290)
	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker$RegionDirChecker.call(HFileCorruptionChecker.java:281)
	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303)
	at java.util.concurrent.FutureTask.run(FutureTask.java:138)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:441)
	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303)
	at java.util.concurrent.FutureTask.run(FutureTask.java:138)
	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:98)
	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:206)
	at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908)
	at java.lang.Thread.run(Thread.java:662)
Caused by: java.lang.reflect.InvocationTargetException
	at sun.reflect.GeneratedMethodAccessor27.invoke(Unknown Source)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:261)
	... 27 more
Caused by: java.io.FileNotFoundException: File does not exist: /user/jon/hbase/testQuarantineMissingHFile/4332ea87d02d33e443550537722ff4fc/fam/befbe65ff30e4a46866f04a5671f0e44
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsUpdateTimes(FSNamesystem.java:1133)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1095)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1067)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:384)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:165)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java:42586)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:427)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:916)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1692)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1688)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1686)

	at org.apache.hadoop.ipc.Client.call(Client.java:1161)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:184)
	at $Proxy17.getBlockLocations(Unknown Source)
	at sun.reflect.GeneratedMethodAccessor26.invoke(Unknown Source)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:165)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:84)
	at $Proxy17.getBlockLocations(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getBlockLocations(ClientNamenodeProtocolTranslatorPB.java:149)
	... 31 more
{code}
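For context: the UndeclaredThrowableException above is a dynamic-proxy artifact. The reflective call in HFileSystem$1.invoke (HFileSystem.java:261) lets the InvocationTargetException escape, and because that type is not declared on the proxied FileSystem method, the JDK proxy wraps it again and buries the real FileNotFoundException two causes deep. Below is a minimal sketch of the unwrapping pattern, with illustrative names only; it is not the actual HFileSystem code.

{code}
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;

// Hypothetical sketch: put a target behind a dynamic proxy and rethrow the
// real cause of a reflective call instead of letting the JDK bury it in
// UndeclaredThrowableException. Class and method names are illustrative only.
public final class UnwrappingProxy {
  @SuppressWarnings("unchecked")
  public static <T> T wrap(final T target, Class<T> iface) {
    InvocationHandler handler = new InvocationHandler() {
      @Override
      public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
        try {
          return method.invoke(target, args);
        } catch (InvocationTargetException ite) {
          // The proxied method itself threw; surface its cause (e.g. a
          // FileNotFoundException) so callers can catch IOException normally.
          Throwable cause = ite.getCause();
          if (cause != null) {
            throw cause;
          }
          throw ite;
        }
      }
    };
    return (T) Proxy.newProxyInstance(iface.getClassLoader(),
        new Class<?>[] { iface }, handler);
  }
}
{code}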

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators
For more information on JIRA, see: http://www.atlassian.com/software/jira

[jira] [Commented] (HBASE-6691) HFile quarantine fails with missing files in hadoop 2.0

Posted by "Jonathan Hsieh (JIRA)" <ji...@apache.org>.
    [ https://issues.apache.org/jira/browse/HBASE-6691?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13446335#comment-13446335 ] 

Jonathan Hsieh commented on HBASE-6691:
---------------------------------------

Can someone take a look? If there are no comments, I'll commit over the weekend.
                

[jira] [Commented] (HBASE-6691) HFile quarantine fails with missing files in hadoop 2.0

Posted by "Jimmy Xiang (JIRA)" <ji...@apache.org>.
    [ https://issues.apache.org/jira/browse/HBASE-6691?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13447821#comment-13447821 ] 

Jimmy Xiang commented on HBASE-6691:
------------------------------------

If cause is an UndeclaredThrowableException, could cause.getCause() be null?
If so, it is better to move this check before checking whether cause is null; otherwise, it is fine with me.
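For illustration only (this is a sketch, not the attached patch), the ordering concern could be handled by unwrapping the reflection wrappers defensively and only dereferencing getCause() when it is non-null:

{code}
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.UndeclaredThrowableException;

// Hypothetical helper illustrating the null-safety point above: walk the
// cause chain through the reflection wrappers, but stop (and keep the
// wrapper itself) if getCause() ever returns null.
final class ThrowableUnwrapper {
  static Throwable unwrap(Throwable t) {
    Throwable current = t;
    while (current instanceof UndeclaredThrowableException
        || current instanceof InvocationTargetException) {
      Throwable inner = current.getCause();
      if (inner == null) {
        break;
      }
      current = inner;
    }
    return current;
  }
}
{code}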
                

[jira] [Updated] (HBASE-6691) HFile quarantine fails with missing files in hadoop 2.0

Posted by "Jonathan Hsieh (JIRA)" <ji...@apache.org>.
     [ https://issues.apache.org/jira/browse/HBASE-6691?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Jonathan Hsieh updated HBASE-6691:
----------------------------------

    Component/s: hadoop2
           Tags:   (was: 0.96)
    

[jira] [Commented] (HBASE-6691) HFile quarantine fails with missing files in hadoop 2.0

Posted by "Hudson (JIRA)" <ji...@apache.org>.
    [ https://issues.apache.org/jira/browse/HBASE-6691?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13448174#comment-13448174 ] 

Hudson commented on HBASE-6691:
-------------------------------

Integrated in HBase-TRUNK-on-Hadoop-2.0.0 #159 (See [https://builds.apache.org/job/HBase-TRUNK-on-Hadoop-2.0.0/159/])
    HBASE-6691 HFile quarantine fails with missing files in hadoop 2.0 (Revision 1380790)

     Result = FAILURE
jmhsieh : 
Files : 
* /hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java

                

[jira] [Commented] (HBASE-6691) HFile quarantine fails with missing files in hadoop 2.0

Posted by "Hudson (JIRA)" <ji...@apache.org>.
    [ https://issues.apache.org/jira/browse/HBASE-6691?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13447956#comment-13447956 ] 

Hudson commented on HBASE-6691:
-------------------------------

Integrated in HBase-TRUNK #3301 (See [https://builds.apache.org/job/HBase-TRUNK/3301/])
    HBASE-6691 HFile quarantine fails with missing files in hadoop 2.0 (Revision 1380790)

     Result = FAILURE
jmhsieh : 
Files : 
* /hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java

                

[jira] [Assigned] (HBASE-6691) HFile quarantine fails with missing files in hadoop 2.0

Posted by "Jonathan Hsieh (JIRA)" <ji...@apache.org>.
     [ https://issues.apache.org/jira/browse/HBASE-6691?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Jonathan Hsieh reassigned HBASE-6691:
-------------------------------------

    Assignee: Jonathan Hsieh
    

[jira] [Commented] (HBASE-6691) HFile quarantine fails with missing files in hadoop 2.0

Posted by "Jimmy Xiang (JIRA)" <ji...@apache.org>.
    [ https://issues.apache.org/jira/browse/HBASE-6691?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13447889#comment-13447889 ] 

Jimmy Xiang commented on HBASE-6691:
------------------------------------

+1
                

[jira] [Updated] (HBASE-6691) HFile quarantine fails with missing files in hadoop 2.0

Posted by "Jonathan Hsieh (JIRA)" <ji...@apache.org>.
     [ https://issues.apache.org/jira/browse/HBASE-6691?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Jonathan Hsieh updated HBASE-6691:
----------------------------------

    Attachment: hbase-6691-v2.patch

Addressed Jimmy's issue.
                
> HFile quarantine fails with missing files in hadoop 2.0
> -------------------------------------------------------
>
>                 Key: HBASE-6691
>                 URL: https://issues.apache.org/jira/browse/HBASE-6691
>             Project: HBase
>          Issue Type: Bug
>    Affects Versions: 0.96.0
>            Reporter: Jonathan Hsieh
>            Assignee: Jonathan Hsieh
>             Fix For: 0.96.0
>
>         Attachments: 6691.patch, hbase-6691-v2.patch
>
>
> Trunk/0.96 has a specific issue mentioned in HBASE-6686 when run against hadoop 2.0.   This addresses this problem.
> {code}
> 2012-08-29 12:55:26,031 ERROR [IPC Server handler 0 on 41070] security.UserGroupInformation(1235): PriviledgedActionException as:jon (auth:SIMPLE) cause:java.io.FileNotFoundException: File does not exist: /user/jon/hbase/testQuarantineMissingHFile/4332ea87d02d33e443550537722ff4fc/fam/befbe65ff30e4a46866f04a5671f0e44
> 2012-08-29 12:55:26,085 WARN  [Thread-2994] hbck.HFileCorruptionChecker(253): Failed to quaratine an HFile in regiondir hdfs://localhost:41070/user/jon/hbase/testQuarantineMissingHFile/4332ea87d02d33e443550537722ff4fc
> java.lang.reflect.UndeclaredThrowableException
> 	at $Proxy23.getBlockLocations(Unknown Source)
> 	at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:882)
> 	at org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:152)
> 	at org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:119)
> 	at org.apache.hadoop.hdfs.DFSInputStream.<init>(DFSInputStream.java:112)
> 	at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:955)
> 	at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:212)
> 	at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:75)
> 	at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:664)
> 	at org.apache.hadoop.hbase.io.hfile.HFile.createReaderWithEncoding(HFile.java:575)
> 	at org.apache.hadoop.hbase.io.hfile.HFile.createReader(HFile.java:605)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker.checkHFile(HFileCorruptionChecker.java:94)
> 	at org.apache.hadoop.hbase.util.TestHBaseFsck$1$1.checkHFile(TestHBaseFsck.java:1401)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker.checkColFamDir(HFileCorruptionChecker.java:175)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker.checkRegionDir(HFileCorruptionChecker.java:208)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker$RegionDirChecker.call(HFileCorruptionChecker.java:290)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker$RegionDirChecker.call(HFileCorruptionChecker.java:281)
> 	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303)
> 	at java.util.concurrent.FutureTask.run(FutureTask.java:138)
> 	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:441)
> 	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303)
> 	at java.util.concurrent.FutureTask.run(FutureTask.java:138)
> 	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:98)
> 	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:206)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908)
> 	at java.lang.Thread.run(Thread.java:662)
> Caused by: java.lang.reflect.InvocationTargetException
> 	at sun.reflect.GeneratedMethodAccessor27.invoke(Unknown Source)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> 	at java.lang.reflect.Method.invoke(Method.java:597)
> 	at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:261)
> 	... 27 more
> Caused by: java.io.FileNotFoundException: File does not exist: /user/jon/hbase/testQuarantineMissingHFile/4332ea87d02d33e443550537722ff4fc/fam/befbe65ff30e4a46866f04a5671f0e44
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsUpdateTimes(FSNamesystem.java:1133)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1095)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1067)
> 	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:384)
> 	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:165)
> 	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java:42586)
> 	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:427)
> 	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:916)
> 	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1692)
> 	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1688)
> 	at java.security.AccessController.doPrivileged(Native Method)
> 	at javax.security.auth.Subject.doAs(Subject.java:396)
> 	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
> 	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1686)
> 	at org.apache.hadoop.ipc.Client.call(Client.java:1161)
> 	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:184)
> 	at $Proxy17.getBlockLocations(Unknown Source)
> 	at sun.reflect.GeneratedMethodAccessor26.invoke(Unknown Source)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> 	at java.lang.reflect.Method.invoke(Method.java:597)
> 	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:165)
> 	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:84)
> 	at $Proxy17.getBlockLocations(Unknown Source)
> 	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getBlockLocations(ClientNamenodeProtocolTranslatorPB.java:149)
> 	... 31 more
> {code}


[jira] [Commented] (HBASE-6691) HFile quarantine fails with missing files in hadoop 2.0

Posted by "Jonathan Hsieh (JIRA)" <ji...@apache.org>.
    [ https://issues.apache.org/jira/browse/HBASE-6691?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13447903#comment-13447903 ] 

Jonathan Hsieh commented on HBASE-6691:
---------------------------------------

Thanks for the review, Jimmy. Committed to trunk.
                
> HFile quarantine fails with missing files in hadoop 2.0
> -------------------------------------------------------
>
>                 Key: HBASE-6691
>                 URL: https://issues.apache.org/jira/browse/HBASE-6691
>             Project: HBase
>          Issue Type: Bug
>          Components: hadoop2, hbck
>    Affects Versions: 0.96.0
>            Reporter: Jonathan Hsieh
>            Assignee: Jonathan Hsieh
>             Fix For: 0.96.0
>
>         Attachments: 6691.patch, hbase-6691-v2.patch
>
>
> Trunk/0.96 has a specific issue mentioned in HBASE-6686 when run against hadoop 2.0.   This addresses this problem.


[jira] [Updated] (HBASE-6691) HFile quarantine fails with missing files in hadoop 2.0

Posted by "Jonathan Hsieh (JIRA)" <ji...@apache.org>.
     [ https://issues.apache.org/jira/browse/HBASE-6691?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Jonathan Hsieh updated HBASE-6691:
----------------------------------

    Attachment: 6691.patch

This patch follows the pattern of the other invoke calls: it catches InvocationTargetException and unwraps it when the cause is of a known type.  TestHBaseFsck::testQuarantineMissingHFile now passes against hadoop 2.0.

Currently letting the QA bot run this against hadoop 1.0, and running it locally against hadoop 2.0.
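For illustration, a minimal sketch of that unwrap pattern. This is not the actual patch; the class and field names here are made up, and only the InvocationTargetException handling mirrors what is described above, assuming a reflective proxy around the underlying FileSystem similar to the one in HFileSystem.

{code}
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

// Hypothetical handler illustrating the "catch and unwrap known causes" pattern.
public class UnwrappingInvocationHandler implements InvocationHandler {
  private final Object delegate; // e.g. the wrapped DistributedFileSystem

  public UnwrappingInvocationHandler(Object delegate) {
    this.delegate = delegate;
  }

  @Override
  public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
    try {
      return method.invoke(delegate, args);
    } catch (InvocationTargetException ite) {
      Throwable cause = ite.getCause();
      if (cause instanceof IOException) {
        // Known checked type (e.g. FileNotFoundException): rethrow the original
        // exception so callers do not see an UndeclaredThrowableException.
        throw cause;
      }
      throw ite; // unknown cause: keep the reflective wrapper
    }
  }
}
{code}

With a handler like this, the FileNotFoundException in the trace above would reach HFileCorruptionChecker directly instead of being wrapped in an UndeclaredThrowableException, so the missing HFile can be treated as an ordinary IO error.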
                
> HFile quarantine fails with missing files in hadoop 2.0
> -------------------------------------------------------
>
>                 Key: HBASE-6691
>                 URL: https://issues.apache.org/jira/browse/HBASE-6691
>             Project: HBase
>          Issue Type: Bug
>    Affects Versions: 0.96.0
>            Reporter: Jonathan Hsieh
>             Fix For: 0.96.0
>
>         Attachments: 6691.patch
>
>
> Trunk/0.96 has a specific issue mentioned in HBASE-6686 when run against hadoop 2.0.   This addresses this problem.


[jira] [Commented] (HBASE-6691) HFile quarantine fails with missing files in hadoop 2.0

Posted by "Hadoop QA (JIRA)" <ji...@apache.org>.
    [ https://issues.apache.org/jira/browse/HBASE-6691?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13444619#comment-13444619 ] 

Hadoop QA commented on HBASE-6691:
----------------------------------

-1 overall.  Here are the results of testing the latest attachment 
  http://issues.apache.org/jira/secure/attachment/12543012/6691.patch
  against trunk revision .

    +1 @author.  The patch does not contain any @author tags.

    -1 tests included.  The patch doesn't appear to include any new or modified tests.
                        Please justify why no new tests are needed for this patch.
                        Also please list what manual steps were performed to verify this patch.

    +1 hadoop2.0.  The patch compiles against the hadoop 2.0 profile.

    -1 javadoc.  The javadoc tool appears to have generated 110 warning messages.

    -1 javac.  The applied patch generated 5 javac compiler warnings (more than the trunk's current 4 warnings).

    -1 findbugs.  The patch appears to introduce 9 new Findbugs (version 1.3.9) warnings.

    +1 release audit.  The applied patch does not increase the total number of release audit warnings.

    +1 core tests.  The patch passed unit tests in .

Test results: https://builds.apache.org/job/PreCommit-HBASE-Build/2734//testReport/
Findbugs warnings: https://builds.apache.org/job/PreCommit-HBASE-Build/2734//artifact/trunk/patchprocess/newPatchFindbugsWarningshbase-hadoop2-compat.html
Findbugs warnings: https://builds.apache.org/job/PreCommit-HBASE-Build/2734//artifact/trunk/patchprocess/newPatchFindbugsWarningshbase-hadoop1-compat.html
Findbugs warnings: https://builds.apache.org/job/PreCommit-HBASE-Build/2734//artifact/trunk/patchprocess/newPatchFindbugsWarningshbase-common.html
Findbugs warnings: https://builds.apache.org/job/PreCommit-HBASE-Build/2734//artifact/trunk/patchprocess/newPatchFindbugsWarningshbase-hadoop-compat.html
Findbugs warnings: https://builds.apache.org/job/PreCommit-HBASE-Build/2734//artifact/trunk/patchprocess/newPatchFindbugsWarningshbase-server.html
Console output: https://builds.apache.org/job/PreCommit-HBASE-Build/2734//console

This message is automatically generated.
                
> HFile quarantine fails with missing files in hadoop 2.0
> -------------------------------------------------------
>
>                 Key: HBASE-6691
>                 URL: https://issues.apache.org/jira/browse/HBASE-6691
>             Project: HBase
>          Issue Type: Bug
>    Affects Versions: 0.96.0
>            Reporter: Jonathan Hsieh
>            Assignee: Jonathan Hsieh
>             Fix For: 0.96.0
>
>         Attachments: 6691.patch
>
>
> Trunk/0.96 has a specific issue mentioned in HBASE-6686 when run against hadoop 2.0.   This addresses this problem.


[jira] [Commented] (HBASE-6691) HFile quarantine fails with missing files in hadoop 2.0

Posted by "Jonathan Hsieh (JIRA)" <ji...@apache.org>.
    [ https://issues.apache.org/jira/browse/HBASE-6691?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13445112#comment-13445112 ] 

Jonathan Hsieh commented on HBASE-6691:
---------------------------------------

Against hadoop 2.0, all tests eventually pass except for the known-bad TestImportExport and TestImportTsv.  TestMasterNoCluster and TestHCM were flaky against 2.0.

                
> HFile quarantine fails with missing files in hadoop 2.0
> -------------------------------------------------------
>
>                 Key: HBASE-6691
>                 URL: https://issues.apache.org/jira/browse/HBASE-6691
>             Project: HBase
>          Issue Type: Bug
>    Affects Versions: 0.96.0
>            Reporter: Jonathan Hsieh
>            Assignee: Jonathan Hsieh
>             Fix For: 0.96.0
>
>         Attachments: 6691.patch
>
>
> Trunk/0.96 has a specific issue mentioned in HBASE-6686 when run against hadoop 2.0.   This addresses this problem.


[jira] [Updated] (HBASE-6691) HFile quarantine fails with missing files in hadoop 2.0

Posted by "Jonathan Hsieh (JIRA)" <ji...@apache.org>.
     [ https://issues.apache.org/jira/browse/HBASE-6691?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Jonathan Hsieh updated HBASE-6691:
----------------------------------

            Tags: 0.96
      Resolution: Fixed
    Hadoop Flags: Reviewed
          Status: Resolved  (was: Patch Available)
    
> HFile quarantine fails with missing files in hadoop 2.0
> -------------------------------------------------------
>
>                 Key: HBASE-6691
>                 URL: https://issues.apache.org/jira/browse/HBASE-6691
>             Project: HBase
>          Issue Type: Bug
>    Affects Versions: 0.96.0
>            Reporter: Jonathan Hsieh
>            Assignee: Jonathan Hsieh
>             Fix For: 0.96.0
>
>         Attachments: 6691.patch, hbase-6691-v2.patch
>
>
> Trunk/0.96 has a specific issue mentioned in HBASE-6686 when run against hadoop 2.0.   This addresses this problem.


[jira] [Updated] (HBASE-6691) HFile quarantine fails with missing files in hadoop 2.0

Posted by "Jonathan Hsieh (JIRA)" <ji...@apache.org>.
     [ https://issues.apache.org/jira/browse/HBASE-6691?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Jonathan Hsieh updated HBASE-6691:
----------------------------------

    Attachment: hbase-6691-v2.patch

Updated to fix it the way Jimmy suggested.  The test passes.
                
> HFile quarantine fails with missing files in hadoop 2.0
> -------------------------------------------------------
>
>                 Key: HBASE-6691
>                 URL: https://issues.apache.org/jira/browse/HBASE-6691
>             Project: HBase
>          Issue Type: Bug
>    Affects Versions: 0.96.0
>            Reporter: Jonathan Hsieh
>            Assignee: Jonathan Hsieh
>             Fix For: 0.96.0
>
>         Attachments: 6691.patch, hbase-6691-v2.patch
>
>
> Trunk/0.96 has a specific issue mentioned in HBASE-6686 when run against hadoop 2.0.   This addresses this problem.


[jira] [Updated] (HBASE-6691) HFile quarantine fails with missing files in hadoop 2.0

Posted by "Jonathan Hsieh (JIRA)" <ji...@apache.org>.
     [ https://issues.apache.org/jira/browse/HBASE-6691?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Jonathan Hsieh updated HBASE-6691:
----------------------------------

    Status: Patch Available  (was: Open)
    
> HFile quarantine fails with missing files in hadoop 2.0
> -------------------------------------------------------
>
>                 Key: HBASE-6691
>                 URL: https://issues.apache.org/jira/browse/HBASE-6691
>             Project: HBase
>          Issue Type: Bug
>    Affects Versions: 0.96.0
>            Reporter: Jonathan Hsieh
>             Fix For: 0.96.0
>
>         Attachments: 6691.patch
>
>
> Trunk/0.96 has a specific issue mentioned in HBASE-6686 when run against hadoop 2.0.   This addresses this problem.


[jira] [Updated] (HBASE-6691) HFile quarantine fails with missing files in hadoop 2.0

Posted by "Jonathan Hsieh (JIRA)" <ji...@apache.org>.
     [ https://issues.apache.org/jira/browse/HBASE-6691?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Jonathan Hsieh updated HBASE-6691:
----------------------------------

    Attachment:     (was: hbase-6691-v2.patch)
    
> HFile quarantine fails with missing files in hadoop 2.0
> -------------------------------------------------------
>
>                 Key: HBASE-6691
>                 URL: https://issues.apache.org/jira/browse/HBASE-6691
>             Project: HBase
>          Issue Type: Bug
>    Affects Versions: 0.96.0
>            Reporter: Jonathan Hsieh
>            Assignee: Jonathan Hsieh
>             Fix For: 0.96.0
>
>         Attachments: 6691.patch
>
>
> Trunk/0.96 has a specific issue mentioned in HBASE-6686 when run against hadoop 2.0.   This addresses this problem.
> {code}
> 2012-08-29 12:55:26,031 ERROR [IPC Server handler 0 on 41070] security.UserGroupInformation(1235): PriviledgedActionException as:jon (auth:SIMPLE) cause:java.io.FileNotFoundException: File does not exist: /user/jon/hbase/testQuarantineMissingHFile/4332ea87d02d33e443550537722ff4fc/fam/befbe65ff30e4a46866f04a5671f0e44
> 2012-08-29 12:55:26,085 WARN  [Thread-2994] hbck.HFileCorruptionChecker(253): Failed to quaratine an HFile in regiondir hdfs://localhost:41070/user/jon/hbase/testQuarantineMissingHFile/4332ea87d02d33e443550537722ff4fc
> java.lang.reflect.UndeclaredThrowableException
> 	at $Proxy23.getBlockLocations(Unknown Source)
> 	at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:882)
> 	at org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:152)
> 	at org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:119)
> 	at org.apache.hadoop.hdfs.DFSInputStream.&lt;init&gt;(DFSInputStream.java:112)
> 	at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:955)
> 	at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:212)
> 	at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:75)
> 	at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:664)
> 	at org.apache.hadoop.hbase.io.hfile.HFile.createReaderWithEncoding(HFile.java:575)
> 	at org.apache.hadoop.hbase.io.hfile.HFile.createReader(HFile.java:605)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker.checkHFile(HFileCorruptionChecker.java:94)
> 	at org.apache.hadoop.hbase.util.TestHBaseFsck$1$1.checkHFile(TestHBaseFsck.java:1401)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker.checkColFamDir(HFileCorruptionChecker.java:175)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker.checkRegionDir(HFileCorruptionChecker.java:208)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker$RegionDirChecker.call(HFileCorruptionChecker.java:290)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker$RegionDirChecker.call(HFileCorruptionChecker.java:281)
> 	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303)
> 	at java.util.concurrent.FutureTask.run(FutureTask.java:138)
> 	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:441)
> 	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303)
> 	at java.util.concurrent.FutureTask.run(FutureTask.java:138)
> 	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:98)
> 	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:206)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908)
> 	at java.lang.Thread.run(Thread.java:662)
> Caused by: java.lang.reflect.InvocationTargetException
> 	at sun.reflect.GeneratedMethodAccessor27.invoke(Unknown Source)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> 	at java.lang.reflect.Method.invoke(Method.java:597)
> 	at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:261)
> 	... 27 more
> Caused by: java.io.FileNotFoundException: File does not exist: /user/jon/hbase/testQuarantineMissingHFile/4332ea87d02d33e443550537722ff4fc/fam/befbe65ff30e4a46866f04a5671f0e44
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsUpdateTimes(FSNamesystem.java:1133)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1095)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1067)
> 	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:384)
> 	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:165)
> 	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java:42586)
> 	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:427)
> 	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:916)
> 	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1692)
> 	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1688)
> 	at java.security.AccessController.doPrivileged(Native Method)
> 	at javax.security.auth.Subject.doAs(Subject.java:396)
> 	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
> 	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1686)
> 	at org.apache.hadoop.ipc.Client.call(Client.java:1161)
> 	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:184)
> 	at $Proxy17.getBlockLocations(Unknown Source)
> 	at sun.reflect.GeneratedMethodAccessor26.invoke(Unknown Source)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> 	at java.lang.reflect.Method.invoke(Method.java:597)
> 	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:165)
> 	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:84)
> 	at $Proxy17.getBlockLocations(Unknown Source)
> 	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getBlockLocations(ClientNamenodeProtocolTranslatorPB.java:149)
> 	... 31 more
> {code}

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators
For more information on JIRA, see: http://www.atlassian.com/software/jira

[jira] [Comment Edited] (HBASE-6691) HFile quarantine fails with missing files in hadoop 2.0

Posted by "Jimmy Xiang (JIRA)" <ji...@apache.org>.
    [ https://issues.apache.org/jira/browse/HBASE-6691?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13447821#comment-13447821 ] 

Jimmy Xiang edited comment on HBASE-6691 at 9/5/12 4:15 AM:
------------------------------------------------------------

If cause is UndeclaredThrowableException, could cause.getCause() be null?
If so, it is better to move this before checking if cause is null; otherwise, it is fine with me.

{noformat}
+                  if (cause != null && cause instanceof UndeclaredThrowableException) {
+                    cause = cause.getCause();
+                  }
+                  if (cause == null){
+                    throw new RuntimeException(
+                      "Proxy invocation failed and getCause is null", e);
+                  }
{noformat}
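
As a standalone illustration of that ordering, here is a minimal sketch; the class and helper names are hypothetical and this is not the attached patch. Unwrapping the UndeclaredThrowableException (and the InvocationTargetException beneath it) before the null check means the final check only fires when no underlying cause exists at all.

{code}
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.UndeclaredThrowableException;

public class UnwrapOrderingSketch {

  // Hypothetical helper, not HBase code: peel proxy wrappers before the null check.
  static Throwable unwrap(Exception e) {
    Throwable cause = e.getCause();
    // Do the UndeclaredThrowableException unwrap *before* testing for null,
    // so a wrapped InvocationTargetException / IOException is not lost.
    if (cause instanceof UndeclaredThrowableException) {
      cause = cause.getCause();
    }
    if (cause instanceof InvocationTargetException) {
      cause = cause.getCause();
    }
    if (cause == null) {
      throw new RuntimeException("Proxy invocation failed and getCause is null", e);
    }
    return cause;
  }

  public static void main(String[] args) {
    Exception proxyFailure = new RuntimeException(
        new UndeclaredThrowableException(
            new InvocationTargetException(
                new IOException("File does not exist"))));
    System.out.println(unwrap(proxyFailure));  // prints the underlying IOException
  }
}
{code}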

                
      was (Author: jxiang):
    If cause is UndeclaredThrowableException, could cause.getCause() be null?
If so, it is better to move this before checking if cause is null; otherwise, it is fine with me.
                  
> HFile quarantine fails with missing files in hadoop 2.0
> -------------------------------------------------------
>
>                 Key: HBASE-6691
>                 URL: https://issues.apache.org/jira/browse/HBASE-6691
>             Project: HBase
>          Issue Type: Bug
>    Affects Versions: 0.96.0
>            Reporter: Jonathan Hsieh
>            Assignee: Jonathan Hsieh
>             Fix For: 0.96.0
>
>         Attachments: 6691.patch
>
>
> Trunk/0.96 has a specific issue, mentioned in HBASE-6686, when run against hadoop 2.0. This JIRA addresses that problem.
> {code}
> 2012-08-29 12:55:26,031 ERROR [IPC Server handler 0 on 41070] security.UserGroupInformation(1235): PriviledgedActionException as:jon (auth:SIMPLE) cause:java.io.FileNotFoundException: File does not exist: /user/jon/hbase/testQuarantineMissingHFile/4332ea87d02d33e443550537722ff4fc/fam/befbe65ff30e4a46866f04a5671f0e44
> 2012-08-29 12:55:26,085 WARN  [Thread-2994] hbck.HFileCorruptionChecker(253): Failed to quaratine an HFile in regiondir hdfs://localhost:41070/user/jon/hbase/testQuarantineMissingHFile/4332ea87d02d33e443550537722ff4fc
> java.lang.reflect.UndeclaredThrowableException
> 	at $Proxy23.getBlockLocations(Unknown Source)
> 	at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:882)
> 	at org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:152)
> 	at org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:119)
> 	at org.apache.hadoop.hdfs.DFSInputStream.<init>(DFSInputStream.java:112)
> 	at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:955)
> 	at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:212)
> 	at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:75)
> 	at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:664)
> 	at org.apache.hadoop.hbase.io.hfile.HFile.createReaderWithEncoding(HFile.java:575)
> 	at org.apache.hadoop.hbase.io.hfile.HFile.createReader(HFile.java:605)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker.checkHFile(HFileCorruptionChecker.java:94)
> 	at org.apache.hadoop.hbase.util.TestHBaseFsck$1$1.checkHFile(TestHBaseFsck.java:1401)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker.checkColFamDir(HFileCorruptionChecker.java:175)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker.checkRegionDir(HFileCorruptionChecker.java:208)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker$RegionDirChecker.call(HFileCorruptionChecker.java:290)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker$RegionDirChecker.call(HFileCorruptionChecker.java:281)
> 	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303)
> 	at java.util.concurrent.FutureTask.run(FutureTask.java:138)
> 	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:441)
> 	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303)
> 	at java.util.concurrent.FutureTask.run(FutureTask.java:138)
> 	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:98)
> 	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:206)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908)
> 	at java.lang.Thread.run(Thread.java:662)
> Caused by: java.lang.reflect.InvocationTargetException
> 	at sun.reflect.GeneratedMethodAccessor27.invoke(Unknown Source)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> 	at java.lang.reflect.Method.invoke(Method.java:597)
> 	at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:261)
> 	... 27 more
> Caused by: java.io.FileNotFoundException: File does not exist: /user/jon/hbase/testQuarantineMissingHFile/4332ea87d02d33e443550537722ff4fc/fam/befbe65ff30e4a46866f04a5671f0e44
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsUpdateTimes(FSNamesystem.java:1133)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1095)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1067)
> 	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:384)
> 	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:165)
> 	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java:42586)
> 	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:427)
> 	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:916)
> 	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1692)
> 	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1688)
> 	at java.security.AccessController.doPrivileged(Native Method)
> 	at javax.security.auth.Subject.doAs(Subject.java:396)
> 	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
> 	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1686)
> 	at org.apache.hadoop.ipc.Client.call(Client.java:1161)
> 	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:184)
> 	at $Proxy17.getBlockLocations(Unknown Source)
> 	at sun.reflect.GeneratedMethodAccessor26.invoke(Unknown Source)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> 	at java.lang.reflect.Method.invoke(Method.java:597)
> 	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:165)
> 	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:84)
> 	at $Proxy17.getBlockLocations(Unknown Source)
> 	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getBlockLocations(ClientNamenodeProtocolTranslatorPB.java:149)
> 	... 31 more
> {code}

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators
For more information on JIRA, see: http://www.atlassian.com/software/jira

[jira] [Updated] (HBASE-6691) HFile quarantine fails with missing files in hadoop 2.0

Posted by "Jonathan Hsieh (JIRA)" <ji...@apache.org>.
     [ https://issues.apache.org/jira/browse/HBASE-6691?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Jonathan Hsieh updated HBASE-6691:
----------------------------------

    Component/s: hbck
    
> HFile quarantine fails with missing files in hadoop 2.0
> -------------------------------------------------------
>
>                 Key: HBASE-6691
>                 URL: https://issues.apache.org/jira/browse/HBASE-6691
>             Project: HBase
>          Issue Type: Bug
>          Components: hadoop2, hbck
>    Affects Versions: 0.96.0
>            Reporter: Jonathan Hsieh
>            Assignee: Jonathan Hsieh
>             Fix For: 0.96.0
>
>         Attachments: 6691.patch, hbase-6691-v2.patch
>
>
> Trunk/0.96 has a specific issue, mentioned in HBASE-6686, when run against hadoop 2.0. This JIRA addresses that problem.
> {code}
> 2012-08-29 12:55:26,031 ERROR [IPC Server handler 0 on 41070] security.UserGroupInformation(1235): PriviledgedActionException as:jon (auth:SIMPLE) cause:java.io.FileNotFoundException: File does not exist: /user/jon/hbase/testQuarantineMissingHFile/4332ea87d02d33e443550537722ff4fc/fam/befbe65ff30e4a46866f04a5671f0e44
> 2012-08-29 12:55:26,085 WARN  [Thread-2994] hbck.HFileCorruptionChecker(253): Failed to quaratine an HFile in regiondir hdfs://localhost:41070/user/jon/hbase/testQuarantineMissingHFile/4332ea87d02d33e443550537722ff4fc
> java.lang.reflect.UndeclaredThrowableException
> 	at $Proxy23.getBlockLocations(Unknown Source)
> 	at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:882)
> 	at org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:152)
> 	at org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:119)
> 	at org.apache.hadoop.hdfs.DFSInputStream.<init>(DFSInputStream.java:112)
> 	at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:955)
> 	at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:212)
> 	at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:75)
> 	at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:664)
> 	at org.apache.hadoop.hbase.io.hfile.HFile.createReaderWithEncoding(HFile.java:575)
> 	at org.apache.hadoop.hbase.io.hfile.HFile.createReader(HFile.java:605)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker.checkHFile(HFileCorruptionChecker.java:94)
> 	at org.apache.hadoop.hbase.util.TestHBaseFsck$1$1.checkHFile(TestHBaseFsck.java:1401)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker.checkColFamDir(HFileCorruptionChecker.java:175)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker.checkRegionDir(HFileCorruptionChecker.java:208)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker$RegionDirChecker.call(HFileCorruptionChecker.java:290)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker$RegionDirChecker.call(HFileCorruptionChecker.java:281)
> 	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303)
> 	at java.util.concurrent.FutureTask.run(FutureTask.java:138)
> 	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:441)
> 	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303)
> 	at java.util.concurrent.FutureTask.run(FutureTask.java:138)
> 	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:98)
> 	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:206)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908)
> 	at java.lang.Thread.run(Thread.java:662)
> Caused by: java.lang.reflect.InvocationTargetException
> 	at sun.reflect.GeneratedMethodAccessor27.invoke(Unknown Source)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> 	at java.lang.reflect.Method.invoke(Method.java:597)
> 	at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:261)
> 	... 27 more
> Caused by: java.io.FileNotFoundException: File does not exist: /user/jon/hbase/testQuarantineMissingHFile/4332ea87d02d33e443550537722ff4fc/fam/befbe65ff30e4a46866f04a5671f0e44
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsUpdateTimes(FSNamesystem.java:1133)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1095)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1067)
> 	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:384)
> 	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:165)
> 	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java:42586)
> 	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:427)
> 	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:916)
> 	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1692)
> 	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1688)
> 	at java.security.AccessController.doPrivileged(Native Method)
> 	at javax.security.auth.Subject.doAs(Subject.java:396)
> 	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
> 	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1686)
> 	at org.apache.hadoop.ipc.Client.call(Client.java:1161)
> 	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:184)
> 	at $Proxy17.getBlockLocations(Unknown Source)
> 	at sun.reflect.GeneratedMethodAccessor26.invoke(Unknown Source)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> 	at java.lang.reflect.Method.invoke(Method.java:597)
> 	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:165)
> 	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:84)
> 	at $Proxy17.getBlockLocations(Unknown Source)
> 	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getBlockLocations(ClientNamenodeProtocolTranslatorPB.java:149)
> 	... 31 more
> {code}

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators
For more information on JIRA, see: http://www.atlassian.com/software/jira

[jira] [Updated] (HBASE-6691) HFile quarantine fails with missing files in hadoop 2.0

Posted by "Jonathan Hsieh (JIRA)" <ji...@apache.org>.
     [ https://issues.apache.org/jira/browse/HBASE-6691?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Jonathan Hsieh updated HBASE-6691:
----------------------------------

    Summary: HFile quarantine fails with missing files in hadoop 2.0  (was: HFile quarantine wails with missing files in hadoop 2.0)
    
> HFile quarantine fails with missing files in hadoop 2.0
> -------------------------------------------------------
>
>                 Key: HBASE-6691
>                 URL: https://issues.apache.org/jira/browse/HBASE-6691
>             Project: HBase
>          Issue Type: Bug
>    Affects Versions: 0.96.0
>            Reporter: Jonathan Hsieh
>             Fix For: 0.96.0
>
>
> Trunk/0.96 has a specific issue, mentioned in HBASE-6686, when run against hadoop 2.0. This JIRA addresses that problem.
> {code}
> 2012-08-29 12:55:26,031 ERROR [IPC Server handler 0 on 41070] security.UserGroupInformation(1235): PriviledgedActionException as:jon (auth:SIMPLE) cause:java.io.FileNotFoundException: File does not exist: /user/jon/hbase/testQuarantineMissingHFile/4332ea87d02d33e443550537722ff4fc/fam/befbe65ff30e4a46866f04a5671f0e44
> 2012-08-29 12:55:26,085 WARN  [Thread-2994] hbck.HFileCorruptionChecker(253): Failed to quaratine an HFile in regiondir hdfs://localhost:41070/user/jon/hbase/testQuarantineMissingHFile/4332ea87d02d33e443550537722ff4fc
> java.lang.reflect.UndeclaredThrowableException
> 	at $Proxy23.getBlockLocations(Unknown Source)
> 	at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:882)
> 	at org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:152)
> 	at org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:119)
> 	at org.apache.hadoop.hdfs.DFSInputStream.<init>(DFSInputStream.java:112)
> 	at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:955)
> 	at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:212)
> 	at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:75)
> 	at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:664)
> 	at org.apache.hadoop.hbase.io.hfile.HFile.createReaderWithEncoding(HFile.java:575)
> 	at org.apache.hadoop.hbase.io.hfile.HFile.createReader(HFile.java:605)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker.checkHFile(HFileCorruptionChecker.java:94)
> 	at org.apache.hadoop.hbase.util.TestHBaseFsck$1$1.checkHFile(TestHBaseFsck.java:1401)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker.checkColFamDir(HFileCorruptionChecker.java:175)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker.checkRegionDir(HFileCorruptionChecker.java:208)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker$RegionDirChecker.call(HFileCorruptionChecker.java:290)
> 	at org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker$RegionDirChecker.call(HFileCorruptionChecker.java:281)
> 	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303)
> 	at java.util.concurrent.FutureTask.run(FutureTask.java:138)
> 	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:441)
> 	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303)
> 	at java.util.concurrent.FutureTask.run(FutureTask.java:138)
> 	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:98)
> 	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:206)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908)
> 	at java.lang.Thread.run(Thread.java:662)
> Caused by: java.lang.reflect.InvocationTargetException
> 	at sun.reflect.GeneratedMethodAccessor27.invoke(Unknown Source)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> 	at java.lang.reflect.Method.invoke(Method.java:597)
> 	at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:261)
> 	... 27 more
> Caused by: java.io.FileNotFoundException: File does not exist: /user/jon/hbase/testQuarantineMissingHFile/4332ea87d02d33e443550537722ff4fc/fam/befbe65ff30e4a46866f04a5671f0e44
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsUpdateTimes(FSNamesystem.java:1133)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1095)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1067)
> 	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:384)
> 	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:165)
> 	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java:42586)
> 	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:427)
> 	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:916)
> 	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1692)
> 	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1688)
> 	at java.security.AccessController.doPrivileged(Native Method)
> 	at javax.security.auth.Subject.doAs(Subject.java:396)
> 	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1232)
> 	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1686)
> 	at org.apache.hadoop.ipc.Client.call(Client.java:1161)
> 	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:184)
> 	at $Proxy17.getBlockLocations(Unknown Source)
> 	at sun.reflect.GeneratedMethodAccessor26.invoke(Unknown Source)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> 	at java.lang.reflect.Method.invoke(Method.java:597)
> 	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:165)
> 	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:84)
> 	at $Proxy17.getBlockLocations(Unknown Source)
> 	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getBlockLocations(ClientNamenodeProtocolTranslatorPB.java:149)
> 	... 31 more
> {code}

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators
For more information on JIRA, see: http://www.atlassian.com/software/jira