You are viewing a plain text version of this content. The canonical link for it is here.
Posted to issues@spark.apache.org by "Xiaoyu Wang (JIRA)" <ji...@apache.org> on 2015/10/13 03:59:05 UTC
[jira] [Created] (SPARK-11075) Spark SQL Thrift Server
authentication issue on kerberized yarn cluster
Xiaoyu Wang created SPARK-11075:
-----------------------------------
Summary: Spark SQL Thrift Server authentication issue on kerberized yarn cluster
Key: SPARK-11075
URL: https://issues.apache.org/jira/browse/SPARK-11075
Project: Spark
Issue Type: Bug
Components: SQL
Affects Versions: 1.5.1, 1.5.0, 1.4.1
Environment: hive-1.2.1
hadoop-2.6.0 configured with Kerberos
Reporter: Xiaoyu Wang
Using a proxy user to connect to the Thrift Server via beeline results in a permission exception:
1.Start the hive 1.2.1 metastore with user hive
{code}
$kinit -kt /tmp/hive.keytab hive/xxx
$nohup ./hive --service metastore 2>&1 >> ../logs/metastore.log &
{code}
2.Start the spark thrift server with user hive
{code}
$kinit -kt /tmp/hive.keytab hive/xxx
$./start-thriftserver.sh --master yarn
{code}
3.Connect to the thrift server with proxy user hive01
{code}
$kinit hive01
beeline command:!connect jdbc:hive2://xxx:10000/default;principal=hive/xxxx@HADOOP.COM;kerberosAuthType=kerberos;hive.server2.proxy.user=hive01
{code}
4.Create table and insert data
{code}
create table test(name string);
insert overwrite table test select * from sometable;
{code}
The insert SQL statement fails with the following exception:
{noformat}
Error: org.apache.hadoop.security.AccessControlException: Permission denied: user=hive01, access=WRITE, inode="/user/hive/warehouse/test/.hive-staging_hive_2015-10-10_09-17-15_972_3267668540808140587-2/-ext-10000/_temporary/0/task_201510100917_0003_m_000000":hive:hadoop:drwxr-xr-x
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkFsPermission(FSPermissionChecker.java:271)
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:257)
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:238)
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:182)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6512)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.renameToInternal(FSNamesystem.java:3805)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.renameToInt(FSNamesystem.java:3775)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.renameTo(FSNamesystem.java:3739)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.rename(NameNodeRpcServer.java:754)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.rename(ClientNamenodeProtocolServerSideTranslatorPB.java:565)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:619)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:962)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2039)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2035)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2033) (state=,code=0)
{noformat}
The table path on HDFS:
{noformat}
drwxrwxrwx - hive hadoop 0 2015-10-10 09:14 /user/hive/warehouse/test
drwxrwxrwx - hive01 hadoop 0 2015-10-10 09:17 /user/hive/warehouse/test/.hive-staging_hive_2015-10-10_09-17-15_972_3267668540808140587-2
drwxr-xr-x - hive01 hadoop 0 2015-10-10 09:17 /user/hive/warehouse/test/.hive-staging_hive_2015-10-10_09-17-15_972_3267668540808140587-2/-ext-10000
drwxr-xr-x - hive01 hadoop 0 2015-10-10 09:17 /user/hive/warehouse/test/.hive-staging_hive_2015-10-10_09-17-15_972_3267668540808140587-2/-ext-10000/_temporary
drwxr-xr-x - hive01 hadoop 0 2015-10-10 09:17 /user/hive/warehouse/test/.hive-staging_hive_2015-10-10_09-17-15_972_3267668540808140587-2/-ext-10000/_temporary/0
drwxr-xr-x - hive hadoop 0 2015-10-10 09:17 /user/hive/warehouse/test/.hive-staging_hive_2015-10-10_09-17-15_972_3267668540808140587-2/-ext-10000/_temporary/0/_temporary
drwxr-xr-x - hive hadoop 0 2015-10-10 09:17 /user/hive/warehouse/test/.hive-staging_hive_2015-10-10_09-17-15_972_3267668540808140587-2/-ext-10000/_temporary/0/task_201510100917_0003_m_000000
-rw-r--r-- 3 hive hadoop 24 2015-10-10 09:17 /user/hive/warehouse/test/.hive-staging_hive_2015-10-10_09-17-15_972_3267668540808140587-2/-ext-10000/_temporary/0/task_201510100917_0003_m_000000/part-00000.deflate
{noformat}
hive-site.xml config:
{code}
<property>
<name>hive.server2.authentication</name>
<value>KERBEROS</value>
</property>
<property>
<name>hive.server2.authentication.kerberos.principal</name>
<value>hive/_HOST@HADOOP.COM</value>
</property>
<property>
<name>hive.server2.authentication.kerberos.keytab</name>
<value>/tmp/hive.keytab</value>
</property>
<property>
<name>hive.metastore.sasl.enabled</name>
<value>true</value>
</property>
<property>
<name>hive.metastore.kerberos.keytab.file</name>
<value>/tmp/hive.keytab</value>
</property>
<property>
<name>hive.metastore.kerberos.principal</name>
<value>hive/_HOST@HADOOP.COM</value>
</property>
<property>
<name>hive.security.authorization.enabled</name>
<value>true</value>
</property>
<property>
<name>hive.security.authorization.createtable.owner.grants</name>
<value>ALL</value>
</property>
<property>
<name>hive.security.authorization.task.factory</name>
<value>org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactoryImpl</value>
</property>
<property>
<name>hive.server2.enable.impersonation</name>
<value>true</value>
</property>
{code}
--
This message was sent by Atlassian JIRA
(v6.3.4#6332)
---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org