Posted to commits@hive.apache.org by se...@apache.org on 2014/10/25 00:01:09 UTC

svn commit: r1634144 - in /hive/trunk: hbase-handler/src/java/org/apache/hadoop/hive/hbase/ ql/src/java/org/apache/hadoop/hive/ql/exec/tez/

Author: sershe
Date: Fri Oct 24 22:01:09 2014
New Revision: 1634144

URL: http://svn.apache.org/r1634144
Log:
HIVE-6915 : Hive HBase queries fail on secure Tez cluster (Siddharth Seth, reviewed by Sergey Shelukhin, Gunther Hagleitner)

Modified:
    hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
    hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java
    hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
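
In short, the four changes below make HBase delegation tokens flow from job submission
through to the Tez tasks: the storage handler obtains an HBase token using a conf that
actually carries the HBase connection properties and merges it into the job's credentials,
the input/output formats stop trying to fetch tokens when no Kerberos credentials are
available, and TezTask attaches the merged credentials to the DAG. A condensed sketch of
the submit-time flow follows; only the four credential-related calls are taken from this
commit, the surrounding method and names are illustrative:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.tez.dag.api.DAG;

    public class HBaseTokenFlowSketch {
      static DAG buildDag(JobConf jobConf, Configuration hbaseEnabledConf) throws IOException {
        // Use a conf that really has the HBase properties (quorum, security settings).
        JobConf hbaseJobConf = new JobConf(hbaseEnabledConf);
        // Ask HBase for a delegation token on behalf of the submitting user.
        org.apache.hadoop.hbase.mapred.TableMapReduceUtil.initCredentials(hbaseJobConf);
        // Fold that token into the job-level credential set ...
        jobConf.getCredentials().mergeAll(hbaseJobConf.getCredentials());
        // ... and attach the set to the DAG so Tez ships it to the AM and the tasks.
        DAG dag = DAG.create("query-dag");
        dag.setCredentials(jobConf.getCredentials());
        return dag;
      }
    }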

Modified: hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java?rev=1634144&r1=1634143&r2=1634144&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java (original)
+++ hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java Fri Oct 24 22:01:09 2014
@@ -475,6 +475,11 @@ public class HBaseStorageHandler extends
       TableMapReduceUtil.addDependencyJars(copy);
       merged.addAll(copy.getConfiguration().getStringCollection("tmpjars"));
       jobConf.set("tmpjars", StringUtils.arrayToString(merged.toArray(new String[0])));
+
+      // Get credentials using the configuration instance which has HBase properties
+      JobConf hbaseJobConf = new JobConf(getConf());
+      org.apache.hadoop.hbase.mapred.TableMapReduceUtil.initCredentials(hbaseJobConf);
+      jobConf.getCredentials().mergeAll(hbaseJobConf.getCredentials());
     } catch (Exception e) {
       throw new RuntimeException(e);
     }
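
The new JobConf(getConf()) here matters: the HBase token has to be requested through a
configuration that carries the HBase connection properties, which the incoming jobConf may
not, hence the fetch-then-merge step. A hypothetical sanity check, not part of the patch,
that the merge left an HBase token behind; "HBASE_AUTH_TOKEN" is assumed to be the token
kind written by HBase's AuthenticationTokenIdentifier:

    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.security.token.Token;

    public class HBaseTokenCheck {
      public static boolean hasHBaseToken(JobConf jobConf) {
        for (Token<?> token : jobConf.getCredentials().getAllTokens()) {
          // Match on the token kind rather than the service, which is cluster-specific.
          if ("HBASE_AUTH_TOKEN".equals(token.getKind().toString())) {
            return true;
          }
        }
        return false;
      }
    }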

Modified: hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java?rev=1634144&r1=1634143&r2=1634144&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java (original)
+++ hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java Fri Oct 24 22:01:09 2014
@@ -73,6 +73,7 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.security.UserGroupInformation;
 
 /**
  * HiveHBaseTableInputFormat implements InputFormat for HBase storage handler
@@ -428,7 +429,9 @@ public class HiveHBaseTableInputFormat e
   public InputSplit[] getSplits(JobConf jobConf, int numSplits) throws IOException {
 
     //obtain delegation tokens for the job
-    TableMapReduceUtil.initCredentials(jobConf);
+    if (UserGroupInformation.getCurrentUser().hasKerberosCredentials()) {
+      TableMapReduceUtil.initCredentials(jobConf);
+    }
 
     String hbaseTableName = jobConf.get(HBaseSerDe.HBASE_TABLE_NAME);
     setHTable(new HTable(HBaseConfiguration.create(jobConf), Bytes.toBytes(hbaseTableName)));
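
The guard reflects where getSplits can now run under Tez: at submit time the caller holds a
Kerberos TGT and can ask HBase for a token, but inside a Tez AM or container the current UGI
typically holds only tokens, so calling initCredentials there would fail, and the token
obtained by HBaseStorageHandler above is already in the job's credentials. The same pattern,
extracted into a standalone helper purely for illustration (this helper is not in Hive):

    import java.io.IOException;
    import org.apache.hadoop.hbase.mapred.TableMapReduceUtil;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.security.UserGroupInformation;

    public class HBaseCredentialGuard {
      // Only try to fetch fresh HBase credentials when the caller can actually
      // authenticate with Kerberos; otherwise rely on tokens already present.
      public static void initCredentialsIfPossible(JobConf jobConf) throws IOException {
        if (UserGroupInformation.getCurrentUser().hasKerberosCredentials()) {
          TableMapReduceUtil.initCredentials(jobConf);
        }
      }
    }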

Modified: hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java?rev=1634144&r1=1634143&r2=1634144&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java (original)
+++ hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java Fri Oct 24 22:01:09 2014
@@ -42,6 +42,7 @@ import org.apache.hadoop.mapreduce.JobCo
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.util.Progressable;
+import org.apache.hadoop.security.UserGroupInformation;
 
 /**
  * HiveHBaseTableOutputFormat implements HiveOutputFormat for HBase tables.
@@ -72,7 +73,9 @@ public class HiveHBaseTableOutputFormat 
   public void checkOutputSpecs(FileSystem fs, JobConf jc) throws IOException {
 
     //obtain delegation tokens for the job
-    TableMapReduceUtil.initCredentials(jc);
+    if (UserGroupInformation.getCurrentUser().hasKerberosCredentials()) {
+      TableMapReduceUtil.initCredentials(jc);
+    }
 
     String hbaseTableName = jc.get(HBaseSerDe.HBASE_TABLE_NAME);
     jc.set(TableOutputFormat.OUTPUT_TABLE, hbaseTableName);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java?rev=1634144&r1=1634143&r2=1634144&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java Fri Oct 24 22:01:09 2014
@@ -47,6 +47,7 @@ import org.apache.hadoop.hive.ql.plan.Un
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.LocalResource;
 import org.apache.tez.common.counters.CounterGroup;
@@ -272,6 +273,7 @@ public class TezTask extends Task<TezWor
 
     // the name of the dag is what is displayed in the AM/Job UI
     DAG dag = DAG.create(work.getName());
+    dag.setCredentials(conf.getCredentials());
 
     for (BaseWork w: ws) {
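
Attaching the job's credentials to the DAG is the final link: Tez distributes whatever is in
the DAG's credentials to the application master and task containers, where the tokens show up
in the current user's UGI for the HBase client (and other token-aware clients) to use. A
hypothetical task-side snippet, not part of the patch, that lists what actually arrived:

    import java.io.IOException;
    import org.apache.hadoop.security.Credentials;
    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.security.token.Token;

    public class TaskSideTokenDump {
      public static void main(String[] args) throws IOException {
        // Inside a Tez container the UGI carries the tokens shipped with the DAG.
        Credentials creds = UserGroupInformation.getCurrentUser().getCredentials();
        for (Token<?> token : creds.getAllTokens()) {
          System.out.println("token kind=" + token.getKind() + " service=" + token.getService());
        }
      }
    }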