Posted to commits@hive.apache.org by da...@apache.org on 2018/10/07 17:27:23 UTC

hive git commit: HIVE-20651: JdbcStorageHandler password should be encrypted (Daniel Dai, reviewed by Sankar Hariappan)

Repository: hive
Updated Branches:
  refs/heads/master 3bd9d63ec -> 827f4f9c0


HIVE-20651: JdbcStorageHandler password should be encrypted (Daniel Dai, reviewed by Sankar Hariappan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/827f4f9c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/827f4f9c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/827f4f9c

Branch: refs/heads/master
Commit: 827f4f9c01dab706f247fb3d1a9099a405c67b2e
Parents: 3bd9d63
Author: Daniel Dai <da...@gmail.com>
Authored: Sun Oct 7 10:26:56 2018 -0700
Committer: Daniel Dai <da...@gmail.com>
Committed: Sun Oct 7 10:26:56 2018 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/conf/Constants.java  |   2 +
 .../test/resources/testconfiguration.properties |   1 +
 jdbc-handler/pom.xml                            |  14 ++
 .../hive/storage/jdbc/JdbcInputFormat.java      |   5 +-
 .../jdbc/conf/JdbcStorageConfigManager.java     |  19 +-
 .../jdbc/dao/GenericJdbcDatabaseAccessor.java   |  18 +-
 .../hive/storage/jdbc/TestJdbcInputFormat.java  |  15 +-
 pom.xml                                         |   1 +
 .../apache/hadoop/hive/ql/exec/Utilities.java   |  52 +++--
 .../hadoop/hive/ql/exec/mr/ExecMapper.java      |   6 +
 .../ql/exec/spark/SparkMapRecordHandler.java    |   6 +
 .../hive/ql/exec/tez/MapRecordProcessor.java    |   7 +
 .../hadoop/hive/ql/io/HiveInputFormat.java      |   1 +
 .../hadoop/hive/ql/parse/CalcitePlanner.java    |   9 +-
 .../apache/hadoop/hive/ql/plan/PlanUtils.java   |   9 +
 .../apache/hadoop/hive/ql/plan/TableDesc.java   |   2 +
 .../queries/clientpositive/external_jdbc_auth.q |  94 ++++++++
 .../llap/external_jdbc_auth.q.out               | 221 +++++++++++++++++++
 18 files changed, 441 insertions(+), 41 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/827f4f9c/common/src/java/org/apache/hadoop/hive/conf/Constants.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/Constants.java b/common/src/java/org/apache/hadoop/hive/conf/Constants.java
index 437096b..4badfa3 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/Constants.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/Constants.java
@@ -66,6 +66,8 @@ public class Constants {
   public static final String JDBC_DRIVER = JDBC_CONFIG_PREFIX + ".jdbc.driver";
   public static final String JDBC_USERNAME = JDBC_CONFIG_PREFIX + ".dbcp.username";
   public static final String JDBC_PASSWORD = JDBC_CONFIG_PREFIX + ".dbcp.password";
+  public static final String JDBC_KEYSTORE = JDBC_CONFIG_PREFIX + ".dbcp.password.keystore";
+  public static final String JDBC_KEY = JDBC_CONFIG_PREFIX + ".dbcp.password.key";
   public static final String JDBC_QUERY = JDBC_CONFIG_PREFIX + ".query";
   public static final String JDBC_QUERY_FIELD_NAMES = JDBC_CONFIG_PREFIX + ".query.fieldNames";
   public static final String JDBC_QUERY_FIELD_TYPES = JDBC_CONFIG_PREFIX + ".query.fieldTypes";

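For reference, JDBC_CONFIG_PREFIX is defined earlier in this class as "hive.sql", so the two new constants resolve to the table properties exercised by the new qfile below. A minimal sketch of the resolved names (the wrapper class is illustrative, not part of the patch):

    // Sketch only: fully resolved names of the new keystore properties,
    // assuming JDBC_CONFIG_PREFIX = "hive.sql" as in Constants.java.
    public class ResolvedJdbcAuthProps {
      public static final String PASSWORD_KEYSTORE = "hive.sql.dbcp.password.keystore";
      public static final String PASSWORD_KEY = "hive.sql.dbcp.password.key";
    }
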
http://git-wip-us.apache.org/repos/asf/hive/blob/827f4f9c/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index 9984ce5..97609cf 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -516,6 +516,7 @@ minillaplocal.query.files=\
   explainanalyze_2.q,\
   explainuser_1.q,\
   explainuser_4.q,\
+  external_jdbc_auth.q,\
   external_jdbc_table.q,\
   external_jdbc_table2.q,\
   fullouter_mapjoin_1_optimized.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/827f4f9c/jdbc-handler/pom.xml
----------------------------------------------------------------------
diff --git a/jdbc-handler/pom.xml b/jdbc-handler/pom.xml
index f90892f..ad421a1 100644
--- a/jdbc-handler/pom.xml
+++ b/jdbc-handler/pom.xml
@@ -113,6 +113,20 @@
       <scope>test</scope>
     </dependency>
 
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-module-junit4</artifactId>
+      <version>${powermock.version}</version>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-api-mockito</artifactId>
+      <version>${powermock.version}</version>
+      <scope>test</scope>
+    </dependency>
+
     <dependency>
       <groupId>org.apache.hive</groupId>
       <artifactId>hive-common</artifactId>

http://git-wip-us.apache.org/repos/asf/hive/blob/827f4f9c/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
index 59104fe..74999db 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcInputFormat.java
@@ -71,10 +71,7 @@ public class JdbcInputFormat extends HiveInputFormat<LongWritable, MapWritable>
 
       // We will split this query into n splits
       LOGGER.debug("Creating {} input splits", numSplits);
-
-      if (dbAccessor == null) {
-        dbAccessor = DatabaseAccessorFactory.getAccessor(job);
-      }
+      dbAccessor = DatabaseAccessorFactory.getAccessor(job);
 
       int numRecords = numSplits <=1 ? Integer.MAX_VALUE : dbAccessor.getTotalNumberOfRecords(job);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/827f4f9c/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java
index 1b5a826..18e2397 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/conf/JdbcStorageConfigManager.java
@@ -16,6 +16,7 @@ package org.apache.hive.storage.jdbc.conf;
 
 import java.io.IOException;
 import org.apache.hadoop.hive.conf.Constants;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -41,6 +42,8 @@ public class JdbcStorageConfigManager {
   private static final Logger LOGGER = LoggerFactory.getLogger(JdbcStorageConfigManager.class);
   public static final String CONFIG_USERNAME = Constants.JDBC_USERNAME;
   public static final String CONFIG_PWD = Constants.JDBC_PASSWORD;
+  public static final String CONFIG_PWD_KEYSTORE = Constants.JDBC_KEYSTORE;
+  public static final String CONFIG_PWD_KEY = Constants.JDBC_KEY;
   private static final EnumSet<JdbcStorageConfig> DEFAULT_REQUIRED_PROPERTIES =
     EnumSet.of(JdbcStorageConfig.DATABASE_TYPE,
                JdbcStorageConfig.JDBC_URL,
@@ -58,7 +61,9 @@ public class JdbcStorageConfigManager {
     checkRequiredPropertiesAreDefined(props);
     resolveMetadata(props);
     for (Entry<Object, Object> entry : props.entrySet()) {
-      if (!String.valueOf(entry.getKey()).equals(CONFIG_PWD)) {
+      if (!String.valueOf(entry.getKey()).equals(CONFIG_PWD) &&
+          !String.valueOf(entry.getKey()).equals(CONFIG_PWD_KEYSTORE) &&
+          !String.valueOf(entry.getKey()).equals(CONFIG_PWD_KEY)) {
         jobProps.put(String.valueOf(entry.getKey()), String.valueOf(entry.getValue()));
       }
     }
@@ -68,9 +73,14 @@ public class JdbcStorageConfigManager {
     throws HiveException, IOException {
     checkRequiredPropertiesAreDefined(props);
     resolveMetadata(props);
-    String secret = props.getProperty(CONFIG_PWD);
-    if (secret != null) {
-      jobSecrets.put(CONFIG_PWD, secret);
+    String passwd = props.getProperty(CONFIG_PWD);
+    if (passwd == null) {
+      String keystore = props.getProperty(CONFIG_PWD_KEYSTORE);
+      String key = props.getProperty(CONFIG_PWD_KEY);
+      passwd = Utilities.getPasswdFromKeystore(keystore, key);
+    }
+    if (passwd != null) {
+      jobSecrets.put(CONFIG_PWD, passwd);
     }
   }
 
@@ -87,7 +97,6 @@ public class JdbcStorageConfigManager {
     return conf;
   }
 
-
   private static void checkRequiredPropertiesAreDefined(Properties props) {
     DatabaseType dbType = null;
 

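The resolution order above: an inline hive.sql.dbcp.password wins; only when it is absent are the keystore URI and key alias consulted. A minimal sketch of that fallback, assuming a Properties object carrying the table properties (names mirror the patch):

    import java.io.IOException;
    import java.util.Properties;
    import org.apache.hadoop.hive.ql.exec.Utilities;

    public class PasswdResolutionSketch {
      // Plain-text password first, then the keystore/key pair handed to the
      // new Utilities.getPasswdFromKeystore() helper (added later in this diff).
      static String resolvePasswd(Properties props) throws IOException {
        String passwd = props.getProperty("hive.sql.dbcp.password");
        if (passwd == null) {
          String keystore = props.getProperty("hive.sql.dbcp.password.keystore");
          String key = props.getProperty("hive.sql.dbcp.password.key");
          passwd = Utilities.getPasswdFromKeystore(keystore, key);
        }
        return passwd;
      }
    }

Note also that copyConfigurationToJob() now filters all three password-related keys out of the job properties, so the secret travels only through jobSecrets.
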
http://git-wip-us.apache.org/repos/asf/hive/blob/827f4f9c/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
index ab19318..abdc5f0 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessor.java
@@ -17,6 +17,7 @@ package org.apache.hive.storage.jdbc.dao;
 import org.apache.commons.dbcp.BasicDataSourceFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.Constants;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.Credentials;
@@ -52,7 +53,6 @@ public class GenericJdbcDatabaseAccessor implements DatabaseAccessor {
   protected static final int DEFAULT_FETCH_SIZE = 1000;
   protected static final Logger LOGGER = LoggerFactory.getLogger(GenericJdbcDatabaseAccessor.class);
   protected DataSource dbcpDataSource = null;
-  protected static final Text DBCP_PWD = new Text(DBCP_CONFIG_PREFIX + ".password");
 
 
   public GenericJdbcDatabaseAccessor() {
@@ -294,6 +294,9 @@ public class GenericJdbcDatabaseAccessor implements DatabaseAccessor {
     }
   }
 
+  private String getFromProperties(Properties dbProperties, String key) {
+    return dbProperties.getProperty(key.replaceFirst(DBCP_CONFIG_PREFIX + "\\.", ""));
+  }
 
   protected Properties getConnectionPoolProperties(Configuration conf) throws Exception {
     // Create the default properties object
@@ -308,10 +311,15 @@ public class GenericJdbcDatabaseAccessor implements DatabaseAccessor {
     }
 
     // handle password
-    Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
-    if (credentials.getSecretKey(DBCP_PWD) != null) {
-      LOGGER.info("found token in credentials");
-      dbProperties.put(DBCP_PWD,new String(credentials.getSecretKey(DBCP_PWD)));
+    String passwd = getFromProperties(dbProperties, JdbcStorageConfigManager.CONFIG_PWD);
+    if (passwd == null) {
+      String keystore = getFromProperties(dbProperties, JdbcStorageConfigManager.CONFIG_PWD_KEYSTORE);
+      String key = getFromProperties(dbProperties, JdbcStorageConfigManager.CONFIG_PWD_KEY);
+      passwd = Utilities.getPasswdFromKeystore(keystore, key);
+    }
+
+    if (passwd != null) {
+      dbProperties.put(JdbcStorageConfigManager.CONFIG_PWD.replaceFirst(DBCP_CONFIG_PREFIX + "\\.", ""), passwd);
     }
 
     // essential properties that shouldn't be overridden by users

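getFromProperties() strips the connection-pool prefix before the lookup, so (assuming DBCP_CONFIG_PREFIX is "hive.sql.dbcp", defined outside this hunk) "hive.sql.dbcp.password" becomes the bare "password" key that commons-dbcp's BasicDataSourceFactory expects. A tiny sketch:

    public class PrefixStripSketch {
      public static void main(String[] args) {
        // Mirrors the replaceFirst() in the helper above, assuming
        // DBCP_CONFIG_PREFIX = "hive.sql.dbcp".
        String full = "hive.sql.dbcp.password";
        String bare = full.replaceFirst("hive\\.sql\\.dbcp\\.", "");
        System.out.println(bare); // prints "password"
      }
    }
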
http://git-wip-us.apache.org/repos/asf/hive/blob/827f4f9c/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestJdbcInputFormat.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestJdbcInputFormat.java b/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestJdbcInputFormat.java
index e904774..b146633 100644
--- a/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestJdbcInputFormat.java
+++ b/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/TestJdbcInputFormat.java
@@ -18,11 +18,15 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hive.storage.jdbc.dao.DatabaseAccessor;
+import org.apache.hive.storage.jdbc.dao.DatabaseAccessorFactory;
 import org.apache.hive.storage.jdbc.exception.HiveJdbcDatabaseAccessException;
 import org.junit.Test;
 import org.junit.runner.RunWith;
+import org.mockito.BDDMockito;
 import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
 
 import java.io.IOException;
 
@@ -32,7 +36,8 @@ import static org.junit.Assert.assertThat;
 import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.when;
 
-@RunWith(MockitoJUnitRunner.class)
+@RunWith(PowerMockRunner.class)
+@PrepareForTest(DatabaseAccessorFactory.class)
 public class TestJdbcInputFormat {
 
   @Mock
@@ -41,9 +46,10 @@ public class TestJdbcInputFormat {
 
   @Test
   public void testSplitLogic_noSpillOver() throws HiveJdbcDatabaseAccessException, IOException {
+    PowerMockito.mockStatic(DatabaseAccessorFactory.class);
+    BDDMockito.given(DatabaseAccessorFactory.getAccessor(any(Configuration.class))).willReturn(mockDatabaseAccessor);
     JdbcInputFormat f = new JdbcInputFormat();
     when(mockDatabaseAccessor.getTotalNumberOfRecords(any(Configuration.class))).thenReturn(15);
-    f.setDbAccessor(mockDatabaseAccessor);
 
     JobConf conf = new JobConf();
     conf.set("mapred.input.dir", "/temp");
@@ -58,9 +64,10 @@ public class TestJdbcInputFormat {
 
   @Test
   public void testSplitLogic_withSpillOver() throws HiveJdbcDatabaseAccessException, IOException {
+    PowerMockito.mockStatic(DatabaseAccessorFactory.class);
+    BDDMockito.given(DatabaseAccessorFactory.getAccessor(any(Configuration.class))).willReturn(mockDatabaseAccessor);
     JdbcInputFormat f = new JdbcInputFormat();
     when(mockDatabaseAccessor.getTotalNumberOfRecords(any(Configuration.class))).thenReturn(15);
-    f.setDbAccessor(mockDatabaseAccessor);
 
     JobConf conf = new JobConf();
     conf.set("mapred.input.dir", "/temp");

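Since JdbcInputFormat.getSplits() now always fetches its accessor from the static DatabaseAccessorFactory (the setDbAccessor() seam is gone), the tests switch from MockitoJUnitRunner to PowerMockRunner and stub the static call. A self-contained sketch of the pattern with PowerMock 1.x / Mockito 1.x; Foo and FooFactory are placeholders, not part of the patch:

    import static org.junit.Assert.assertSame;

    import org.junit.Test;
    import org.junit.runner.RunWith;
    import org.mockito.BDDMockito;
    import org.powermock.api.mockito.PowerMockito;
    import org.powermock.core.classloader.annotations.PrepareForTest;
    import org.powermock.modules.junit4.PowerMockRunner;

    class Foo {}

    class FooFactory {
      static Foo create() { return new Foo(); }
    }

    @RunWith(PowerMockRunner.class)
    @PrepareForTest(FooFactory.class) // class whose static methods get stubbed
    public class StaticFactoryMockTest {
      @Test
      public void stubsStaticFactory() {
        PowerMockito.mockStatic(FooFactory.class);
        Foo stub = new Foo();
        BDDMockito.given(FooFactory.create()).willReturn(stub);
        // Anything calling FooFactory.create() in this test now gets the stub.
        assertSame(stub, FooFactory.create());
      }
    }
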
http://git-wip-us.apache.org/repos/asf/hive/blob/827f4f9c/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 5008923..75ebe9b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -188,6 +188,7 @@
     <opencsv.version>2.3</opencsv.version>
     <orc.version>1.5.3</orc.version>
     <mockito-all.version>1.10.19</mockito-all.version>
+    <powermock.version>1.7.4</powermock.version>
     <mina.version>2.0.0-M5</mina.version>
     <netty.version>4.1.17.Final</netty.version>
     <netty3.version>3.10.5.Final</netty3.version>

http://git-wip-us.apache.org/repos/asf/hive/blob/827f4f9c/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 74fb1ba..76a30eb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -202,7 +202,9 @@ import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.alias.CredentialProviderFactory;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hive.common.util.ACLConfigurationParser;
 import org.apache.hive.common.util.ReflectionUtil;
@@ -2269,19 +2271,6 @@ public final class Utilities {
         job.set(entry.getKey(), entry.getValue());
       }
     }
-
-    try {
-      Map<String, String> jobSecrets = tbl.getJobSecrets();
-      if (jobSecrets != null) {
-        for (Map.Entry<String, String> entry : jobSecrets.entrySet()) {
-          job.getCredentials().addSecretKey(new Text(entry.getKey()), entry.getValue().getBytes());
-          UserGroupInformation.getCurrentUser().getCredentials()
-            .addSecretKey(new Text(entry.getKey()), entry.getValue().getBytes());
-        }
-      }
-    } catch (IOException e) {
-      throw new HiveException(e);
-    }
   }
 
   /**
@@ -2307,18 +2296,24 @@ public final class Utilities {
         job.set(entry.getKey(), entry.getValue());
       }
     }
+  }
 
-    try {
-      Map<String, String> jobSecrets = tbl.getJobSecrets();
-      if (jobSecrets != null) {
-        for (Map.Entry<String, String> entry : jobSecrets.entrySet()) {
-          job.getCredentials().addSecretKey(new Text(entry.getKey()), entry.getValue().getBytes());
-          UserGroupInformation.getCurrentUser().getCredentials()
-            .addSecretKey(new Text(entry.getKey()), entry.getValue().getBytes());
+  /**
+   * Copy job credential secrets that belong to the given table (keys of the
+   * form TABLE_SECRET#<tableName>#<propertyKey>) into its table properties.
+   */
+  public static void copyJobSecretToTableProperties(TableDesc tbl) throws IOException {
+    Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
+    for (Text key : credentials.getAllSecretKeys()) {
+      String keyString = key.toString();
+      if (keyString.startsWith(TableDesc.SECRET_PREFIX + TableDesc.SECRET_DELIMIT)) {
+        String[] comps = keyString.split(TableDesc.SECRET_DELIMIT);
+        String tblName = comps[1];
+        String keyName = comps[2];
+        if (tbl.getTableName().equalsIgnoreCase(tblName)) {
+          tbl.getProperties().put(keyName, new String(credentials.getSecretKey(key)));
         }
       }
-    } catch (IOException e) {
-      throw new HiveException(e);
     }
   }
 
@@ -4500,4 +4495,17 @@ public final class Utilities {
     }
     return bucketingVersion;
   }
+
+  public static String getPasswdFromKeystore(String keystore, String key) throws IOException {
+    String passwd = null;
+    if (keystore != null && key != null) {
+      Configuration conf = new Configuration();
+      conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, keystore);
+      char[] pwdCharArray = conf.getPassword(key);
+      if (pwdCharArray != null) {
+        passwd = new String(pwdCharArray);
+      }
+    }
+    return passwd;
+  }
 }

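The new getPasswdFromKeystore() helper is a thin wrapper over Hadoop's CredentialProvider API: it points hadoop.security.credential.provider.path at the supplied keystore URI and lets Configuration.getPassword() resolve the alias. A standalone sketch; the jceks path and alias are illustrative, patterned on the qfile below:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.alias.CredentialProviderFactory;

    public class KeystoreLookupSketch {
      public static void main(String[] args) throws Exception {
        // An entry like this can be created ahead of time with, e.g.:
        //   hadoop credential create test_derby_auth1.password \
        //       -provider jceks://file/tmp/test.jceks
        Configuration conf = new Configuration();
        conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
            "jceks://file/tmp/test.jceks");
        char[] pwd = conf.getPassword("test_derby_auth1.password");
        if (pwd != null) {
          System.out.println("resolved a " + pwd.length + "-char password");
        }
      }
    }
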
http://git-wip-us.apache.org/repos/asf/hive/blob/827f4f9c/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
index 99b33a3..91868a4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java
@@ -24,6 +24,8 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -93,6 +95,10 @@ public class ExecMapper extends MapReduceBase implements Mapper {
 
       // create map and fetch operators
       MapWork mrwork = Utilities.getMapWork(job);
+      for (PartitionDesc part : mrwork.getAliasToPartnInfo().values()) {
+        TableDesc tableDesc = part.getTableDesc();
+        Utilities.copyJobSecretToTableProperties(tableDesc);
+      }
 
       CompilationOpContext runtimeCtx = new CompilationOpContext();
       if (mrwork.getVectorMode()) {

http://git-wip-us.apache.org/repos/asf/hive/blob/827f4f9c/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java
index 7cd853f..88dd12c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkMapRecordHandler.java
@@ -22,6 +22,8 @@ import java.io.IOException;
 import java.util.Iterator;
 import java.util.List;
 
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
@@ -70,6 +72,10 @@ public class SparkMapRecordHandler extends SparkRecordHandler {
       execContext = new ExecMapperContext(jc);
       // create map and fetch operators
       MapWork mrwork = Utilities.getMapWork(job);
+      for (PartitionDesc part : mrwork.getAliasToPartnInfo().values()) {
+        TableDesc tableDesc = part.getTableDesc();
+        Utilities.copyJobSecretToTableProperties(tableDesc);
+      }
 
       CompilationOpContext runtimeCtx = new CompilationOpContext();
       if (mrwork.getVectorMode()) {

http://git-wip-us.apache.org/repos/asf/hive/blob/827f4f9c/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java
index ac43917..ea2e1fd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java
@@ -30,6 +30,8 @@ import java.util.Set;
 import java.util.concurrent.Callable;
 
 import org.apache.hadoop.hive.llap.LlapUtil;
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -138,6 +140,11 @@ public class MapRecordProcessor extends RecordProcessor {
     // TODO HIVE-14042. Cleanup may be required if exiting early.
     Utilities.setMapWork(jconf, mapWork);
 
+    for (PartitionDesc part : mapWork.getAliasToPartnInfo().values()) {
+      TableDesc tableDesc = part.getTableDesc();
+      Utilities.copyJobSecretToTableProperties(tableDesc);
+    }
+
     String prefixes = jconf.get(DagUtils.TEZ_MERGE_WORK_FILE_PREFIXES);
     if (prefixes != null) {
       mergeWorkList = new ArrayList<MapWork>();

http://git-wip-us.apache.org/repos/asf/hive/blob/827f4f9c/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
index a86656c..6bac285 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
@@ -453,6 +453,7 @@ public class HiveInputFormat<K extends WritableComparable, V extends Writable>
     ValidWriteIdList validMmWriteIdList = getMmValidWriteIds(conf, table, validWriteIdList);
 
     try {
+      Utilities.copyJobSecretToTableProperties(table);
       Utilities.copyTablePropertiesToConf(table, conf);
       if (tableScan != null) {
         AcidUtils.setAcidOperationalProperties(conf, tableScan.getConf().isTranscationalTable(),

http://git-wip-us.apache.org/repos/asf/hive/blob/827f4f9c/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index 40cfdbd..22f3266 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -130,6 +130,7 @@ import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorFactory;
 import org.apache.hadoop.hive.ql.exec.RowSchema;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -2900,7 +2901,12 @@ public class CalcitePlanner extends SemanticAnalyzer {
             final String url = tabMetaData.getProperty(Constants.JDBC_URL);
             final String driver = tabMetaData.getProperty(Constants.JDBC_DRIVER);
             final String user = tabMetaData.getProperty(Constants.JDBC_USERNAME);
-            final String pswd = tabMetaData.getProperty(Constants.JDBC_PASSWORD);
+            String pswd = tabMetaData.getProperty(Constants.JDBC_PASSWORD);
+            if (pswd == null) {
+              String keystore = tabMetaData.getProperty(Constants.JDBC_KEYSTORE);
+              String key = tabMetaData.getProperty(Constants.JDBC_KEY);
+              pswd = Utilities.getPasswdFromKeystore(keystore, key);
+            }
             final String tableName = tabMetaData.getProperty(Constants.JDBC_TABLE);
 
             DataSource ds = JdbcSchema.dataSource(url, driver, user, pswd);

http://git-wip-us.apache.org/repos/asf/hive/blob/827f4f9c/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
index 250a085..5229700 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
@@ -72,6 +72,7 @@ import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
@@ -981,6 +982,14 @@ public final class PlanUtils {
       if (storageHandler != null) {
         storageHandler.configureJobConf(tableDesc, jobConf);
       }
+      if (tableDesc.getJobSecrets() != null) {
+        for (Map.Entry<String, String> entry : tableDesc.getJobSecrets().entrySet()) {
+          String key = TableDesc.SECRET_PREFIX + TableDesc.SECRET_DELIMIT +
+                  tableDesc.getTableName() + TableDesc.SECRET_DELIMIT + entry.getKey();
+          jobConf.getCredentials().addSecretKey(new Text(key), entry.getValue().getBytes());
+        }
+        tableDesc.getJobSecrets().clear();
+      }
     } catch (HiveException e) {
       throw new RuntimeException(e);
     }

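configureJobConf() is the writer side of the secret plumbing: each table secret is stashed in the JobConf credentials under TABLE_SECRET#<tableName>#<propertyKey> and then cleared from the plan, and Utilities.copyJobSecretToTableProperties() (earlier in this diff) splits on the same delimiter to route each secret back to the owning table at task start. A small sketch of the round trip; the table name is illustrative:

    public class SecretKeyRoundTripSketch {
      public static void main(String[] args) {
        // Encode, as configureJobConf() does (constants from TableDesc below):
        String key = "TABLE_SECRET" + "#" + "default.ext_auth1" + "#"
            + "hive.sql.dbcp.password";
        // Decode, as copyJobSecretToTableProperties() does:
        String[] comps = key.split("#");
        System.out.println("table = " + comps[1] + ", property = " + comps[2]);
      }
    }
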
http://git-wip-us.apache.org/repos/asf/hive/blob/827f4f9c/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
index b73faa5..7993779 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
@@ -53,6 +53,8 @@ public class TableDesc implements Serializable, Cloneable {
   private java.util.Properties properties;
   private Map<String, String> jobProperties;
   private Map<String, String> jobSecrets;
+  public static final String SECRET_PREFIX = "TABLE_SECRET";
+  public static final String SECRET_DELIMIT = "#";
 
   public TableDesc() {
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/827f4f9c/ql/src/test/queries/clientpositive/external_jdbc_auth.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/external_jdbc_auth.q b/ql/src/test/queries/clientpositive/external_jdbc_auth.q
new file mode 100644
index 0000000..acfb298
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/external_jdbc_auth.q
@@ -0,0 +1,94 @@
+--! qt:dataset:src
+
+CREATE TEMPORARY FUNCTION dboutput AS 'org.apache.hadoop.hive.contrib.genericudf.example.GenericUDFDBOutput';
+
+FROM src
+
+SELECT
+
+dboutput ( 'jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1;create=true','user1','passwd1',
+'CREATE TABLE SIMPLE_DERBY_TABLE1 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE)' ),
+
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1','user1','passwd1',
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1','user1','passwd1',
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','-20','-20.0','-20.0'),
+
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1','user1','passwd1',
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','100','-15','65.0','-74.0'),
+
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1','user1','passwd1',
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','44','53','-455.454','330.76')
+
+limit 1;
+
+FROM src
+
+SELECT
+
+dboutput ( 'jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth2;create=true','user2','passwd2',
+'CREATE TABLE SIMPLE_DERBY_TABLE2 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE )' ),
+
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth2','user2','passwd2',
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth2','user2','passwd2',
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','8','9.0','11.0'),
+
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth2','user2','passwd2',
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','101','-16','66.0','-75.0'),
+
+dboutput('jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth2','user2','passwd2',
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','40','50','-455.4543','330.767')
+
+limit 1;
+
+
+CREATE EXTERNAL TABLE ext_auth1
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+                "hive.sql.database.type" = "DERBY",
+                "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+                "hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth1;collation=TERRITORY_BASED:PRIMARY",
+                "hive.sql.dbcp.username" = "user1",
+                "hive.sql.dbcp.password.keystore" = "jceks://file/${system:test.tmp.dir}/../../../data/files/test.jceks",
+                "hive.sql.dbcp.password.key" = "test_derby_auth1.password",
+                "hive.sql.table" = "SIMPLE_DERBY_TABLE1",
+                "hive.sql.dbcp.maxActive" = "1"
+);
+
+
+CREATE EXTERNAL TABLE ext_auth2
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+                "hive.sql.database.type" = "DERBY",
+                "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+                "hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${system:test.tmp.dir}/test_derby_auth2;collation=TERRITORY_BASED:PRIMARY",
+                "hive.sql.dbcp.username" = "user2",
+                "hive.sql.dbcp.password.keystore" = "jceks://file/${system:test.tmp.dir}/../../../data/files/test.jceks",
+                "hive.sql.dbcp.password.key" = "test_derby_auth2.password",
+                "hive.sql.table" = "SIMPLE_DERBY_TABLE2",
+                "hive.sql.dbcp.maxActive" = "1"
+);
+
+CREATE TABLE hive_table
+(
+  ikey int
+);
+
+INSERT INTO hive_table VALUES(20);
+
+(SELECT * FROM ext_auth1 JOIN hive_table ON ext_auth1.ikey=hive_table.ikey) UNION ALL (SELECT * FROM ext_auth2 JOIN hive_table ON ext_auth2.ikey=hive_table.ikey);

http://git-wip-us.apache.org/repos/asf/hive/blob/827f4f9c/ql/src/test/results/clientpositive/llap/external_jdbc_auth.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/external_jdbc_auth.q.out b/ql/src/test/results/clientpositive/llap/external_jdbc_auth.q.out
new file mode 100644
index 0000000..badc8b9
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/external_jdbc_auth.q.out
@@ -0,0 +1,221 @@
+PREHOOK: query: CREATE TEMPORARY FUNCTION dboutput AS 'org.apache.hadoop.hive.contrib.genericudf.example.GenericUDFDBOutput'
+PREHOOK: type: CREATEFUNCTION
+PREHOOK: Output: dboutput
+POSTHOOK: query: CREATE TEMPORARY FUNCTION dboutput AS 'org.apache.hadoop.hive.contrib.genericudf.example.GenericUDFDBOutput'
+POSTHOOK: type: CREATEFUNCTION
+POSTHOOK: Output: dboutput
+PREHOOK: query: FROM src
+
+SELECT
+
+#### A masked pattern was here ####
+'CREATE TABLE SIMPLE_DERBY_TABLE1 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE)' ),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','-20','-20.0','-20.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','100','-15','65.0','-74.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','44','53','-455.454','330.76')
+
+limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: FROM src
+
+SELECT
+
+#### A masked pattern was here ####
+'CREATE TABLE SIMPLE_DERBY_TABLE1 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE)' ),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','-20','-20.0','-20.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','100','-15','65.0','-74.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE1 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','44','53','-455.454','330.76')
+
+limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	0	0	0	0
+PREHOOK: query: FROM src
+
+SELECT
+
+#### A masked pattern was here ####
+'CREATE TABLE SIMPLE_DERBY_TABLE2 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE )' ),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','8','9.0','11.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','101','-16','66.0','-75.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','40','50','-455.4543','330.767')
+
+limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: FROM src
+
+SELECT
+
+#### A masked pattern was here ####
+'CREATE TABLE SIMPLE_DERBY_TABLE2 ("ikey" INTEGER, "bkey" BIGINT, "fkey" REAL, "dkey" DOUBLE )' ),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','20','20','20.0','20.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','-20','8','9.0','11.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','101','-16','66.0','-75.0'),
+
+#### A masked pattern was here ####
+'INSERT INTO SIMPLE_DERBY_TABLE2 ("ikey","bkey","fkey","dkey") VALUES (?,?,?,?)','40','50','-455.4543','330.767')
+
+limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	0	0	0	0
+PREHOOK: query: CREATE EXTERNAL TABLE ext_auth1
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+                "hive.sql.database.type" = "DERBY",
+                "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+#### A masked pattern was here ####
+                "hive.sql.dbcp.username" = "user1",
+#### A masked pattern was here ####
+                "hive.sql.dbcp.password.key" = "test_derby_auth1.password",
+                "hive.sql.table" = "SIMPLE_DERBY_TABLE1",
+                "hive.sql.dbcp.maxActive" = "1"
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@ext_auth1
+POSTHOOK: query: CREATE EXTERNAL TABLE ext_auth1
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+                "hive.sql.database.type" = "DERBY",
+                "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+#### A masked pattern was here ####
+                "hive.sql.dbcp.username" = "user1",
+#### A masked pattern was here ####
+                "hive.sql.dbcp.password.key" = "test_derby_auth1.password",
+                "hive.sql.table" = "SIMPLE_DERBY_TABLE1",
+                "hive.sql.dbcp.maxActive" = "1"
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@ext_auth1
+PREHOOK: query: CREATE EXTERNAL TABLE ext_auth2
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+                "hive.sql.database.type" = "DERBY",
+                "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+#### A masked pattern was here ####
+                "hive.sql.dbcp.username" = "user2",
+#### A masked pattern was here ####
+                "hive.sql.dbcp.password.key" = "test_derby_auth2.password",
+                "hive.sql.table" = "SIMPLE_DERBY_TABLE2",
+                "hive.sql.dbcp.maxActive" = "1"
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@ext_auth2
+POSTHOOK: query: CREATE EXTERNAL TABLE ext_auth2
+(
+ ikey int,
+ bkey bigint,
+ fkey float,
+ dkey double
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+                "hive.sql.database.type" = "DERBY",
+                "hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+#### A masked pattern was here ####
+                "hive.sql.dbcp.username" = "user2",
+#### A masked pattern was here ####
+                "hive.sql.dbcp.password.key" = "test_derby_auth2.password",
+                "hive.sql.table" = "SIMPLE_DERBY_TABLE2",
+                "hive.sql.dbcp.maxActive" = "1"
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@ext_auth2
+PREHOOK: query: CREATE TABLE hive_table
+(
+  ikey int
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@hive_table
+POSTHOOK: query: CREATE TABLE hive_table
+(
+  ikey int
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@hive_table
+PREHOOK: query: INSERT INTO hive_table VALUES(20)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@hive_table
+POSTHOOK: query: INSERT INTO hive_table VALUES(20)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@hive_table
+POSTHOOK: Lineage: hive_table.ikey SCRIPT []
+PREHOOK: query: (SELECT * FROM ext_auth1 JOIN hive_table ON ext_auth1.ikey=hive_table.ikey) UNION ALL (SELECT * FROM ext_auth2 JOIN hive_table ON ext_auth2.ikey=hive_table.ikey)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@ext_auth1
+PREHOOK: Input: default@ext_auth2
+PREHOOK: Input: default@hive_table
+#### A masked pattern was here ####
+POSTHOOK: query: (SELECT * FROM ext_auth1 JOIN hive_table ON ext_auth1.ikey=hive_table.ikey) UNION ALL (SELECT * FROM ext_auth2 JOIN hive_table ON ext_auth2.ikey=hive_table.ikey)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@ext_auth1
+POSTHOOK: Input: default@ext_auth2
+POSTHOOK: Input: default@hive_table
+#### A masked pattern was here ####
+20	20	20.0	20.0	20
+20	20	20.0	20.0	20