Posted to commits@hive.apache.org by an...@apache.org on 2019/08/29 10:41:27 UTC

[hive] branch master updated: HIVE-22148: S3A delegation tokens are not added in the job config of the Compactor. (Harish JP, reviewed by Anishek Agarwal)

This is an automated email from the ASF dual-hosted git repository.

anishek pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 1cbff4d  HIVE-22148: S3A delegation tokens are not added in the job config of the Compactor. (Harish JP, reviewed by Anishek Agarwal)
1cbff4d is described below

commit 1cbff4d0438ae5317f8c2acb16501e67516f0dc6
Author: Anishek Agarwal <an...@20149.local>
AuthorDate: Thu Aug 29 11:59:56 2019 +0530

    HIVE-22148: S3A delegation tokens are not added in the job config of the Compactor. (Harish JP, reviewed by Anishek Agarwal)
---
 .../org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java   | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
index 7c79d7b..0f1579a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
@@ -94,6 +94,7 @@ import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.TaskAttemptContext;
 import org.apache.hadoop.mapred.lib.NullOutputFormat;
+import org.apache.hadoop.mapreduce.security.TokenCache;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hive.common.util.HiveStringUtils;
@@ -779,6 +780,15 @@ public class CompactorMR {
     job.setLong(MIN_TXN, minTxn);
     job.setLong(MAX_TXN, maxTxn);
 
+    // Add tokens for all the file systems in the input paths.
+    ArrayList<Path> dirs = new ArrayList<>();
+    if (baseDir != null) {
+      dirs.add(baseDir);
+    }
+    dirs.addAll(deltaDirs);
+    dirs.addAll(dirsToSearch);
+    TokenCache.obtainTokensForNamenodes(job.getCredentials(), dirs.toArray(new Path[]{}), job);
+
     if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST)) {
       mrJob = job;
     }
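
For readers unfamiliar with the API: the added call uses Hadoop's TokenCache.obtainTokensForNamenodes to fetch a delegation token for each distinct filesystem backing the given paths (HDFS, S3A, and so on) and store it in the job's Credentials, so the compactor's MR tasks can authenticate to those filesystems. Below is a minimal, self-contained sketch of the same pattern; the class name and the S3A paths are hypothetical and only illustrate how the patch wires the base/delta/search directories into the token lookup.

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapreduce.security.TokenCache;

    // Illustrative example only; not part of the patch.
    public class ObtainTokensSketch {
      public static void main(String[] args) throws IOException {
        JobConf job = new JobConf();

        // Hypothetical input locations; in CompactorMR these are the optional
        // base directory plus the delta and search directories of the compaction.
        List<Path> dirs = new ArrayList<>();
        dirs.add(new Path("s3a://example-bucket/warehouse/t/base_0000010"));
        dirs.add(new Path("s3a://example-bucket/warehouse/t/delta_0000011_0000020"));

        // Fetch a delegation token for each distinct filesystem in 'dirs'
        // (when the filesystem issues one) and add it to the job credentials.
        TokenCache.obtainTokensForNamenodes(job.getCredentials(), dirs.toArray(new Path[0]), job);
      }
    }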