Posted to commits@drill.apache.org by dz...@apache.org on 2023/08/15 12:40:36 UTC

[drill] 01/01: Try to force simple auth mode.

This is an automated email from the ASF dual-hosted git repository.

dzamo pushed a commit to branch hadoop-impersonation-tests
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 5e1705e43025569e9bb49d3e6c5b7b8ee5635ffe
Author: James Turton <ja...@somecomputer.xyz>
AuthorDate: Tue Aug 15 12:11:27 2023 +0200

    Try to force simple auth mode.
---
 .../exec/impersonation/BaseTestImpersonation.java     | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)

diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/BaseTestImpersonation.java b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/BaseTestImpersonation.java
index f1d471d09e..73383fdbaf 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/BaseTestImpersonation.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/BaseTestImpersonation.java
@@ -18,14 +18,17 @@
 package org.apache.drill.exec.impersonation;
 
 import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.dotdrill.DotDrillType;
+import org.apache.drill.exec.rpc.security.KerberosHelper;
 import org.apache.drill.exec.store.StoragePluginRegistry;
 import org.apache.drill.exec.store.dfs.FileSystemConfig;
 import org.apache.drill.exec.store.dfs.WorkspaceConfig;
 import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
 import org.apache.drill.shaded.guava.com.google.common.base.Strings;
+import org.apache.drill.test.BaseDirTestWatcher;
 import org.apache.drill.test.ClientFixture;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.ClusterFixtureBuilder;
@@ -38,6 +41,7 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.security.UserGroupInformation;
 
+import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.nio.file.Paths;
 import java.util.Map;
@@ -45,6 +49,7 @@ import java.util.Properties;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY;
 
 public class BaseTestImpersonation extends ClusterTest {
   protected static final String MINI_DFS_STORAGE_PLUGIN_NAME = "mini_dfs_plugin";
@@ -54,6 +59,7 @@ public class BaseTestImpersonation extends ClusterTest {
   protected static Configuration dfsConf;
   protected static FileSystem fs;
   protected static File miniDfsStoragePath;
+  private static KerberosHelper krbHelper;
 
   // Test users and groups
   protected static final String[] org1Users = { "user0_1", "user1_1", "user2_1", "user3_1", "user4_1", "user5_1" };
@@ -100,9 +106,22 @@ public class BaseTestImpersonation extends ClusterTest {
     dfsConf.set("hdfs.minidfs.basedir", miniDfsStoragePath.getCanonicalPath());
 
     if (isImpersonationEnabled) {
+      krbHelper = new KerberosHelper(BaseTestImpersonation.class.getSimpleName(), null);
+      krbHelper.setupKdc(BaseDirTestWatcher.createTempDir(dirTestWatcher.getTmpDir()));
+      String simpleAuth = "<configuration>" +
+          "<property>" +
+            "<name>hadoop.security.authentication</name>" +
+            "<value>simple</value>" +
+            "<final>true</final>" +
+          "</property>" +
+        "</configuration>";
+      // The following InputStream will be closed by dfsConf after reading
+      dfsConf.addResource(new ByteArrayInputStream(simpleAuth.getBytes()));
+
       // Set the proxyuser settings so that the user who is running the Drillbits/MiniDfs can impersonate other users.
       dfsConf.set(String.format("hadoop.proxyuser.%s.hosts", processUser), "*");
       dfsConf.set(String.format("hadoop.proxyuser.%s.groups", processUser), "*");
+      //dfsConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, krbHelper.serverKeytab.toString());
     }
 
     // Start the MiniDfs cluster
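
For reference, the patch forces simple authentication by handing the MiniDFS
Configuration an in-memory core-site-style XML resource before the cluster
starts. A minimal standalone sketch of that technique follows; it is not part
of the commit, and the class name SimpleAuthConfigSketch is made up purely for
illustration.

  import org.apache.hadoop.conf.Configuration;

  import java.io.ByteArrayInputStream;
  import java.nio.charset.StandardCharsets;

  public class SimpleAuthConfigSketch {
    public static void main(String[] args) {
      // Same inline XML shape as the patch: a single property forcing
      // hadoop.security.authentication to "simple" and marking it final.
      String simpleAuth = "<configuration>" +
          "<property>" +
            "<name>hadoop.security.authentication</name>" +
            "<value>simple</value>" +
            "<final>true</final>" +
          "</property>" +
        "</configuration>";

      Configuration conf = new Configuration();
      // addResource(InputStream) reads and closes the stream when the
      // configuration is loaded, as the patch's comment notes for dfsConf.
      conf.addResource(new ByteArrayInputStream(
          simpleAuth.getBytes(StandardCharsets.UTF_8)));

      // Expected to print "simple".
      System.out.println(conf.get("hadoop.security.authentication"));
    }
  }

Marking the property <final>true</final> keeps resources loaded afterwards
(for example an hdfs-site.xml on the classpath) from overriding the value,
which is presumably why the patch uses it to pin the auth mode.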