Posted to dev@ambari.apache.org by "Siddharth Wagle (JIRA)" <ji...@apache.org> on 2014/03/20 17:42:46 UTC

[jira] [Resolved] (AMBARI-5156) Hive CLI using Tez runtime fails to start with an HDFS exception

     [ https://issues.apache.org/jira/browse/AMBARI-5156?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Siddharth Wagle resolved AMBARI-5156.
-------------------------------------

    Resolution: Fixed

Committed to trunk.

> Hive CLI using Tez runtime fails to start with an HDFS exception
> ----------------------------------------------------------------
>
>                 Key: AMBARI-5156
>                 URL: https://issues.apache.org/jira/browse/AMBARI-5156
>             Project: Ambari
>          Issue Type: Bug
>          Components: agent
>    Affects Versions: 1.5.0
>            Reporter: Siddharth Wagle
>            Assignee: Siddharth Wagle
>             Fix For: 1.5.0
>
>
> On a cluster node we had set the following in _/etc/hive/conf/hive-site.xml_:
> {code}
> <property>
>     <name>hive.jar.directory</name>
>     <value>hdfs:///apps/hive/install</value>
> </property>
> {code}
> The HDFS directory has the following contents:
> {code}
> # hadoop fs -ls -R /apps/hive/
> drwxr-xr-x   - hive hdfs          0 2014-03-19 17:10 /apps/hive/install
> -rwxr-xr-x   3 hive hdfs   14719276 2014-03-19 17:10 /apps/hive/install/hive-exec.jar
> drwxrwxrwx   - hive hdfs          0 2014-03-19 17:08 /apps/hive/warehouse
> {code}
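> From the listing, _/apps/hive/install_ is owned by hive:hdfs with mode 755, so users other than _hive_ have no write access to it. As a minimal sketch of how to confirm the permission problem independently of Hive (assuming sudo access on a client node; the test file name is arbitrary):
> {code}
> # Try to create a file in the install directory as ambari-qa.
> # While the directory is hive:hdfs 755, this is expected to fail with
> # the same AccessControlException shown below.
> sudo -u ambari-qa hadoop fs -touchz /apps/hive/install/perm_test
> {code}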
> Running the _hive_ command as user _ambari-qa_ then hits this exception:
> {code}
> $ hive
> Logging initialized using configuration in file:/etc/hive/conf.dist/hive-log4j.properties
> Exception in thread "main" java.lang.RuntimeException: org.apache.hadoop.security.AccessControlException: Permission denied: user=ambari-qa, access=WRITE, inode="/apps/hive/install":hive:hdfs:drwxr-xr-x
> 	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkFsPermission(FSPermissionChecker.java:265)
> 	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:251)
> 	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:232)
> 	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:176)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:5481)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:5463)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkAncestorAccess(FSNamesystem.java:5437)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInternal(FSNamesystem.java:2265)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2218)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2171)
> 	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:517)
> 	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:354)
> 	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
> 	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:585)
> 	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:928)
> 	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2003)
> 	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1999)
> 	at java.security.AccessController.doPrivileged(Native Method)
> 	at javax.security.auth.Subject.doAs(Subject.java:415)
> 	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1548)
> 	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1997)
> 	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:345)
> 	at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:682)
> 	at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:626)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:606)
> 	at org.apache.hadoop.util.RunJar.main(RunJar.java:212)
> Caused by: org.apache.hadoop.security.AccessControlException: Permission denied: user=ambari-qa, access=WRITE, inode="/apps/hive/install":hive:hdfs:drwxr-xr-x
> 	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkFsPermission(FSPermissionChecker.java:265)
> 	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:251)
> 	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:232)
> 	at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:176)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:5481)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:5463)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkAncestorAccess(FSNamesystem.java:5437)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInternal(FSNamesystem.java:2265)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2218)
> 	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2171)
> 	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:517)
> 	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:354)
> 	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
> 	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:585)
> 	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:928)
> 	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2003)
> 	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1999)
> 	at java.security.AccessController.doPrivileged(Native Method)
> 	at javax.security.auth.Subject.doAs(Subject.java:415)
> 	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1548)
> 	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1997)
> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
> 	at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
> 	at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)
> 	at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1602)
> 	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1461)
> 	at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1386)
> 	at org.apache.hadoop.hdfs.DistributedFileSystem$6.doCall(DistributedFileSystem.java:394)
> 	at org.apache.hadoop.hdfs.DistributedFileSystem$6.doCall(DistributedFileSystem.java:390)
> 	at org.apache.had
> {code}
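> A manual workaround (a sketch of one option only, not necessarily the change committed for this issue) is to make the install directory writable by the group of users that start Hive/Tez sessions, for example:
> {code}
> # Run as the HDFS superuser. The "hadoop" group is an assumption here;
> # use whichever group the Hive client users actually belong to.
> sudo -u hdfs hadoop fs -chgrp hadoop /apps/hive/install
> sudo -u hdfs hadoop fs -chmod 775 /apps/hive/install
> {code}
> This only relaxes the permission check on an existing cluster; it says nothing about how Ambari should provision the directory in the first place.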



--
This message was sent by Atlassian JIRA
(v6.2#6252)