Posted to commits@dolphinscheduler.apache.org by ki...@apache.org on 2021/12/09 01:27:37 UTC

[dolphinscheduler] branch 2.0.1-release updated: [Fix-7277][datasource] Support Kerberos auto renewal (#7277) (#7280)

This is an automated email from the ASF dual-hosted git repository.

kirs pushed a commit to branch 2.0.1-release
in repository https://gitbox.apache.org/repos/asf/dolphinscheduler.git


The following commit(s) were added to refs/heads/2.0.1-release by this push:
     new db5367a  [Fix-7277][datasource] Support Kerberos auto renewal (#7277) (#7280)
db5367a is described below

commit db5367ab68199f692ad85322c4cbc63862480ada
Author: mask <39...@users.noreply.github.com>
AuthorDate: Thu Dec 9 09:27:29 2021 +0800

    [Fix-7277][datasource] Support Kerberos auto renewal (#7277) (#7280)
---
 .../datasource/hive/HiveDataSourceClient.java      | 59 ++++++++++++++++++++--
 1 file changed, 56 insertions(+), 3 deletions(-)
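
A minimal, self-contained sketch of the keytab relogin pattern this patch introduces (the full diff follows). The idea is to log in from a keytab once, then have a single-threaded ScheduledExecutorService call UserGroupInformation#checkTGTAndReloginFromKeytab at a fixed delay so the TGT is refreshed before it expires; the patch additionally refreshes sun.security.krb5.Config and KerberosName.defaultRealm via reflection and forces the UGI's isKeytab flag so keytab relogin is permitted. The principal and keytab path below are illustrative values, not taken from the patch.

    import java.io.IOException;
    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.UserGroupInformation;

    public class KerberosRenewalSketch {
        public static void main(String[] args) throws IOException {
            // Illustrative principal/keytab values; replace with real ones.
            String principal = "hive/host.example.com@EXAMPLE.COM";
            String keytabPath = "/etc/security/keytabs/hive.service.keytab";

            Configuration conf = new Configuration();
            conf.set("hadoop.security.authentication", "kerberos");
            UserGroupInformation.setConfiguration(conf);

            // One-time login from the keytab.
            UserGroupInformation ugi =
                    UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytabPath);

            // Periodic relogin, mirroring the scheduleWithFixedDelay call added in the diff.
            ScheduledExecutorService renewalService = Executors.newSingleThreadScheduledExecutor();
            renewalService.scheduleWithFixedDelay(() -> {
                try {
                    ugi.checkTGTAndReloginFromKeytab();
                } catch (IOException e) {
                    // Log and retry on the next tick instead of failing the client.
                    e.printStackTrace();
                }
            }, 5, 5, TimeUnit.MINUTES);
        }
    }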

diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceClient.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceClient.java
index 9b8b622..0d78c79 100644
--- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceClient.java
+++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceClient.java
@@ -32,19 +32,27 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
 
 import java.io.IOException;
+import java.lang.reflect.Field;
 import java.sql.Connection;
 import java.sql.SQLException;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.springframework.jdbc.core.JdbcTemplate;
 
 import com.zaxxer.hikari.HikariDataSource;
 
+import sun.security.krb5.Config;
+
 public class HiveDataSourceClient extends CommonDataSourceClient {
 
     private static final Logger logger = LoggerFactory.getLogger(HiveDataSourceClient.class);
 
+    private ScheduledExecutorService kerberosRenewalService;
+
+    private Configuration hadoopConf;
     protected HikariDataSource oneSessionDataSource;
     private UserGroupInformation ugi;
 
@@ -53,7 +61,17 @@ public class HiveDataSourceClient extends CommonDataSourceClient {
     }
 
     @Override
+    protected void preInit() {
+        logger.info("PreInit in {}", getClass().getName());
+        this.kerberosRenewalService = Executors.newSingleThreadScheduledExecutor();
+    }
+
+    @Override
     protected void initClient(BaseConnectionParam baseConnectionParam) {
+        logger.info("Create Configuration for hive configuration.");
+        this.hadoopConf = createHadoopConf();
+        logger.info("Create Configuration success.");
+
         logger.info("Create UserGroupInformation.");
         this.ugi = createUserGroupInformation(baseConnectionParam.getUser());
         logger.info("Create ugi success.");
@@ -73,6 +91,15 @@ public class HiveDataSourceClient extends CommonDataSourceClient {
         String krb5File = PropertyUtils.getString(JAVA_SECURITY_KRB5_CONF_PATH);
         if (StringUtils.isNotBlank(krb5File)) {
             System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5File);
+            try {
+                Config.refresh();
+                Class<?> kerberosName = Class.forName("org.apache.hadoop.security.authentication.util.KerberosName");
+                Field field = kerberosName.getDeclaredField("defaultRealm");
+                field.setAccessible(true);
+                field.set(null, Config.getInstance().getDefaultRealm());
+            } catch (Exception e) {
+                throw new RuntimeException("Update Kerberos environment failed.", e);
+            }
         }
     }
 
@@ -80,15 +107,38 @@ public class HiveDataSourceClient extends CommonDataSourceClient {
         String krb5File = PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH);
         String keytab = PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH);
         String principal = PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME);
+
         try {
-            return CommonUtil.createUGI(getHadoopConf(), principal, keytab, krb5File, username);
+            UserGroupInformation ugi = CommonUtil.createUGI(getHadoopConf(), principal, keytab, krb5File, username);
+            try {
+                Field isKeytabField = ugi.getClass().getDeclaredField("isKeytab");
+                isKeytabField.setAccessible(true);
+                isKeytabField.set(ugi, true);
+            } catch (NoSuchFieldException | IllegalAccessException e) {
+                logger.warn(e.getMessage());
+            }
+
+            kerberosRenewalService.scheduleWithFixedDelay(() -> {
+                try {
+                    ugi.checkTGTAndReloginFromKeytab();
+                } catch (IOException e) {
+                    logger.error("Check TGT and Renewal from Keytab error", e);
+                }
+            }, 5, 5, TimeUnit.MINUTES);
+            return ugi;
         } catch (IOException e) {
             throw new RuntimeException("createUserGroupInformation fail. ", e);
         }
     }
 
+    protected Configuration createHadoopConf() {
+        Configuration hadoopConf = new Configuration();
+        hadoopConf.setBoolean("ipc.client.fallback-to-simple-auth-allowed", true);
+        return hadoopConf;
+    }
+
     protected Configuration getHadoopConf() {
-        return new Configuration();
+        return this.hadoopConf;
     }
 
     @Override
@@ -104,7 +154,10 @@ public class HiveDataSourceClient extends CommonDataSourceClient {
     @Override
     public void close() {
         super.close();
+
         logger.info("close HiveDataSourceClient.");
+        kerberosRenewalService.shutdown();
+        this.ugi = null;
 
         this.oneSessionDataSource.close();
         this.oneSessionDataSource = null;