Posted to issues@hbase.apache.org by "Jonathan Hsieh (JIRA)" <ji...@apache.org> on 2013/05/02 06:28:17 UTC

[jira] [Assigned] (HBASE-8477) [hadoop2] TestTableInputFormatScan* fails intermittently with PrivilegedActionException

     [ https://issues.apache.org/jira/browse/HBASE-8477?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Jonathan Hsieh reassigned HBASE-8477:
-------------------------------------

    Assignee: Jonathan Hsieh
    
> [hadoop2] TestTableInputFormatScan* fails intermittently with PrivilegedActionException
> ---------------------------------------------------------------------------------------
>
>                 Key: HBASE-8477
>                 URL: https://issues.apache.org/jira/browse/HBASE-8477
>             Project: HBase
>          Issue Type: Sub-task
>          Components: hadoop2, mapreduce, security
>    Affects Versions: 0.98.0, 0.95.1
>            Reporter: Jonathan Hsieh
>            Assignee: Jonathan Hsieh
>             Fix For: 0.98.0, 0.95.1
>
>
> In the test we see the following log messages, which indicate an authorization failure (the DataNode rejects the getBlockLocalPathInfo() call) and then a failure to recover cleanly from it; see the configuration sketch after the trace.
> {code}
> 2013-04-16 23:27:04,469 ERROR [IPC Server handler 0 on 45600] security.UserGroupInformation(1370): PriviledgedActionException as:ec2-user.hfs.2 (auth:SIMPLE) cause:org.apache.hadoop.security.AccessControlException: Can't continue with getBlockLocalPathInfo() authorization. The user ec2-user.hfs.2 is not allowed to call getBlockLocalPathInfo
> 2013-04-16 23:27:04,501 WARN  [PRI IPC Server handler 4 on 33892] hdfs.DFSInputStream(489): Failed to connect to /127.0.0.1:55547 for block, add to deadNodes and continue. org.apache.hadoop.security.AccessControlException: Can't continue with getBlockLocalPathInfo() authorization. The user ec2-user.hfs.2 is not allowed to call getBlockLocalPathInfo
> 	at org.apache.hadoop.hdfs.server.datanode.DataNode.checkBlockLocalPathAccess(DataNode.java:1016)
> 	at org.apache.hadoop.hdfs.server.datanode.DataNode.getBlockLocalPathInfo(DataNode.java:1026)
> 	at org.apache.hadoop.hdfs.protocolPB.ClientDatanodeProtocolServerSideTranslatorPB.getBlockLocalPathInfo(ClientDatanodeProtocolServerSideTranslatorPB.java:112)
> 	at org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos$ClientDatanodeProtocolService$2.callBlockingMethod(ClientDatanodeProtocolProtos.java:5104)
> 	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:454)
> 	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:910)
> 	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1694)
> 	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1690)
> 	at java.security.AccessController.doPrivileged(Native Method)
> 	at javax.security.auth.Subject.doAs(Subject.java:396)
> 	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1367)
> 	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1688)
> org.apache.hadoop.security.AccessControlException: Can't continue with getBlockLocalPathInfo() authorization. The user ec2-user.hfs.2 is not allowed to call getBlockLocalPathInfo
> 	at org.apache.hadoop.hdfs.server.datanode.DataNode.checkBlockLocalPathAccess(DataNode.java:1016)
> 	at org.apache.hadoop.hdfs.server.datanode.DataNode.getBlockLocalPathInfo(DataNode.java:1026)
> 	at org.apache.hadoop.hdfs.protocolPB.ClientDatanodeProtocolServerSideTranslatorPB.getBlockLocalPathInfo(ClientDatanodeProtocolServerSideTranslatorPB.java:112)
> 	at org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos$ClientDatanodeProtocolService$2.callBlockingMethod(ClientDatanodeProtocolProtos.java:5104)
> 	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:454)
> 	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:910)
> 	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1694)
> 	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1690)
> 	at java.security.AccessController.doPrivileged(Native Method)
> 	at javax.security.auth.Subject.doAs(Subject.java:396)
> 	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1367)
> 	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1688)
> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:39)
> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:27)
> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:513)
> 	at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:90)
> 	at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:57)
> 	at org.apache.hadoop.hdfs.DFSClient.getLocalBlockReader(DFSClient.java:790)
> 	at org.apache.hadoop.hdfs.DFSInputStream.getBlockReader(DFSInputStream.java:888)
> 	at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:455)
> 	at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:645)
> 	at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:689)
> 	at java.io.DataInputStream.readFully(DataInputStream.java:178)
> 	at java.io.DataInputStream.readFully(DataInputStream.java:152)
> 	at org.apache.hadoop.hbase.util.FSTableDescriptors.getTableDescriptorModtime(FSTableDescriptors.java:429)
> 	at org.apache.hadoop.hbase.util.FSTableDescriptors.getTableDescriptorModtime(FSTableDescriptors.java:414)
> 	at org.apache.hadoop.hbase.util.FSTableDescriptors.get(FSTableDescriptors.java:169)
> 	at org.apache.hadoop.hbase.util.FSTableDescriptors.get(FSTableDescriptors.java:132)
> 	at org.apache.hadoop.hbase.regionserver.HRegionServer.openRegion(HRegionServer.java:3350)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> 	at java.lang.reflect.Method.invoke(Method.java:597)
> 	at org.apache.hadoop.hbase.ipc.ProtobufRpcServerEngine$Server.call(ProtobufRpcServerEngine.java:174)
> 	at org.apache.hadoop.hbase.ipc.HBaseServer$Handler.run(HBaseServer.java:1871)
> Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException): Can't continue with getBlockLocalPathInfo() authorization. The user ec2-user.hfs.2 is not allowed to call getBlockLocalPathInfo
> 	at org.apache.hadoop.hdfs.server.datanode.DataNode.checkBlockLocalPathAccess(DataNode.java:1016)
> 	at org.apache.hadoop.hdfs.server.datanode.DataNode.getBlockLocalPathInfo(DataNode.java:1026)
> 	at org.apache.hadoop.hdfs.protocolPB.ClientDatanodeProtocolServerSideTranslatorPB.getBlockLocalPathInfo(ClientDatanodeProtocolServerSideTranslatorPB.java:112)
> 	at org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos$ClientDatanodeProtocolService$2.callBlockingMethod(ClientDatanodeProtocolProtos.java:5104)
> 	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:454)
> 	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:910)
> 	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1694)
> 	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1690)
> 	at java.security.AccessController.doPrivileged(Native Method)
> 	at javax.security.auth.Subject.doAs(Subject.java:396)
> 	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1367)
> 	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1688)
> 	at org.apache.hadoop.ipc.Client.call(Client.java:1164)
> 	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:202)
> 	at com.sun.proxy.$Proxy20.getBlockLocalPathInfo(Unknown Source)
> 	at org.apache.hadoop.hdfs.protocolPB.ClientDatanodeProtocolTranslatorPB.getBlockLocalPathInfo(ClientDatanodeProtocolTranslatorPB.java:199)
> 	at org.apache.hadoop.hdfs.BlockReaderLocal.getBlockPathInfo(BlockReaderLocal.java:254)
> 	at org.apache.hadoop.hdfs.BlockReaderLocal.newBlockReader(BlockReaderLocal.java:167)
> 	at org.apache.hadoop.hdfs.DFSClient.getLocalBlockReader(DFSClient.java:786)
> 	... 17 more
> {code}
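>
> For context, and not as the committed fix: in hadoop2 the legacy short-circuit read path visible in the trace (DataNode.checkBlockLocalPathAccess) authorizes getBlockLocalPathInfo() against the user whitelist in dfs.block.local-path-access.user. A minimal client-side sketch follows; the config keys are the real hadoop2 names, but whether whitelisting the derived "ec2-user.hfs.2" identity alone cures the flakiness is an open question on this issue.
> {code}
> import org.apache.hadoop.conf.Configuration;
>
> // Sketch only: shows the two settings the legacy short-circuit read
> // path depends on, under the assumption that the minicluster's derived
> // per-process user must be whitelisted alongside the base user.
> public class ShortCircuitAclSketch {
>   public static void main(String[] args) {
>     Configuration conf = new Configuration();
>     // Client side: enable (legacy) short-circuit local reads.
>     conf.setBoolean("dfs.client.read.shortcircuit", true);
>     // DataNode side: only these users may call getBlockLocalPathInfo();
>     // a caller not on this list fails with the AccessControlException
>     // seen above.
>     String base = System.getProperty("user.name");
>     conf.set("dfs.block.local-path-access.user", base + "," + base + ".hfs.2");
>     System.out.println(conf.get("dfs.block.local-path-access.user"));
>   }
> }
> {code}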
> This seems similar to the other short-circuit-read related hadoop2 failures; a sketch of where the derived test user comes from follows.
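> The exact origin of the "ec2-user.hfs.2" identity is an assumption here, but the minicluster tests run filesystem work under derived per-process users via UserGroupInformation, and a doAs under such a user is what surfaces as the PriviledgedActionException in the log. A hypothetical illustration of that pattern:
> {code}
> import java.security.PrivilegedExceptionAction;
> import org.apache.hadoop.security.UserGroupInformation;
>
> public class DerivedUserSketch {
>   public static void main(String[] args) throws Exception {
>     // Hypothetical: a per-process test identity in the style of the
>     // "<user>.hfs.N" names seen in the log.
>     UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
>         System.getProperty("user.name") + ".hfs.2",
>         new String[] { "supergroup" });
>     ugi.doAs(new PrivilegedExceptionAction<Void>() {
>       @Override
>       public Void run() throws Exception {
>         // Any HDFS read issued here runs as the derived user; with the
>         // legacy short-circuit path enabled, getBlockLocalPathInfo() is
>         // rejected unless that user is whitelisted (see sketch above).
>         return null;
>       }
>     });
>   }
> }
> {code}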

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators.
For more information on JIRA, see: http://www.atlassian.com/software/jira