Posted to commits@hive.apache.org by br...@apache.org on 2014/12/30 18:43:09 UTC

svn commit: r1648556 - in /hive/branches/HIVE-8065: itests/qtest/ itests/util/src/main/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoop/hive/ql/processors/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientpositive/encrypted/ q...

Author: brock
Date: Tue Dec 30 17:43:09 2014
New Revision: 1648556

URL: http://svn.apache.org/r1648556
Log:
HIVE-9167 - Enhance encryption testing framework to allow creating keys & zones inside .q files (Sergio Pena via Brock)
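
A minimal sketch of the new test-only command syntax, based on the encryption_load_data_to_encrypted_tables.q test added in this commit (the key name and path below are simply the values that test uses):

    crypto create_key --keyName key1;
    crypto create_zone --keyName key1 --path /user/hive/warehouse/encrypted_table;
    -- queries against tables stored under the encryption zone go here
    crypto delete_key --keyName key1;

CRYPTO is registered in HiveCommand as a test-only command, so QTestUtil dispatches it to the new EncryptionProcessor instead of passing it to the CLI driver.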

Added:
    hive/branches/HIVE-8065/ql/src/java/org/apache/hadoop/hive/ql/processors/EncryptionProcessor.java
    hive/branches/HIVE-8065/ql/src/test/queries/clientpositive/encryption_load_data_to_encrypted_tables.q
    hive/branches/HIVE-8065/ql/src/test/results/clientpositive/encrypted/encryption_load_data_to_encrypted_tables.q.out
Modified:
    hive/branches/HIVE-8065/itests/qtest/pom.xml
    hive/branches/HIVE-8065/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
    hive/branches/HIVE-8065/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java
    hive/branches/HIVE-8065/ql/src/test/templates/TestCliDriver.vm
    hive/branches/HIVE-8065/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
    hive/branches/HIVE-8065/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
    hive/branches/HIVE-8065/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
    hive/branches/HIVE-8065/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java

Modified: hive/branches/HIVE-8065/itests/qtest/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-8065/itests/qtest/pom.xml?rev=1648556&r1=1648555&r2=1648556&view=diff
==============================================================================
--- hive/branches/HIVE-8065/itests/qtest/pom.xml (original)
+++ hive/branches/HIVE-8065/itests/qtest/pom.xml Tue Dec 30 17:43:09 2014
@@ -544,7 +544,7 @@
                               runDisabled="${run_disabled}"
                               hiveConfDir="${basedir}/${hive.path.to.root}/data/conf"
                               resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/encrypted"
-                              className="TestEncrytedHDFSCliDriver"
+                              className="TestEncryptedHDFSCliDriver"
                               logFile="${project.build.directory}/testencryptedhdfsclidrivergen.log"
                               logDirectory="${project.build.directory}/qfile-results/clientpositive/"
                               hadoopVersion="${active.hadoop.version}"

Modified: hive/branches/HIVE-8065/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-8065/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1648556&r1=1648555&r2=1648556&view=diff
==============================================================================
--- hive/branches/HIVE-8065/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/branches/HIVE-8065/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java Tue Dec 30 17:43:09 2014
@@ -38,6 +38,7 @@ import java.io.OutputStreamWriter;
 import java.io.PrintStream;
 import java.io.Serializable;
 import java.io.StringWriter;
+import java.lang.RuntimeException;
 import java.net.URL;
 import java.security.NoSuchAlgorithmException;
 import java.util.ArrayList;
@@ -54,6 +55,7 @@ import java.util.regex.Pattern;
 
 import junit.framework.Assert;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.logging.Log;
@@ -94,6 +96,10 @@ import org.apache.tools.ant.BuildExcepti
 import org.apache.zookeeper.WatchedEvent;
 import org.apache.zookeeper.Watcher;
 import org.apache.zookeeper.ZooKeeper;
+import org.apache.hadoop.hive.ql.processors.EncryptionProcessor;
+import org.apache.hadoop.hive.ql.processors.CommandProcessor;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.processors.HiveCommand;
 
 import com.google.common.collect.ImmutableList;
 
@@ -110,17 +116,12 @@ public class QTestUtil {
   private static final String ENCRYPTED_WITH_256_BITS_KEY_DB_NAME = "encryptedwith256bitskeydb";
 
   // security property names
-  private static final String SECURITY_KEY_BIT_LENGTH_PROP_NAME =
-    "hadoop.security.key.default.bitlength";
-  private static final String SECURITY_KEY_CIPHER_NAME = "hadoop.security.key.default.cipher";
+  private static final String SECURITY_KEY_PROVIDER_URI_NAME = "dfs.encryption.key.provider.uri";
 
   // keyNames used for encrypting the hdfs path
   private final String KEY_NAME_IN_128 = "k128";
   private final String KEY_NAME_IN_256 = "k256";
 
-  // hadoop cipher
-  private final String HADOOP_CIPHER_NAME = "AES/CTR/NoPadding";
-
   private static final Log LOG = LogFactory.getLog("QTestUtil");
   private static final String QTEST_LEAVE_FILES = "QTEST_LEAVE_FILES";
   private final String defaultInitScript = "q_test_init.sql";
@@ -265,13 +266,6 @@ public class QTestUtil {
     return null;
   }
 
-  private void initEncryptionRelatedConf() {
-    HadoopShims shims = ShimLoader.getHadoopShims();
-    // set up the java key provider for encrypted hdfs cluster
-    conf.set(shims.getHadoopConfNames().get("HADOOPSECURITYKEYPROVIDER"), getKeyProviderURI());
-    conf.set(SECURITY_KEY_CIPHER_NAME, HADOOP_CIPHER_NAME);
-  }
-
   public void initConf() throws Exception {
 
     String vectorizationEnabled = System.getProperty("test.vectorization.enabled");
@@ -365,12 +359,16 @@ public class QTestUtil {
       FileSystem fs;
 
       if (clusterType == MiniClusterType.encrypted) {
-        initEncryptionRelatedConf();
+        // Set the security key provider so that the MiniDFS cluster is initialized
+        // with encryption
+        conf.set(SECURITY_KEY_PROVIDER_URI_NAME, getKeyProviderURI());
 
         dfs = shims.getMiniDfs(conf, numberOfDataNodes, true, null);
         fs = dfs.getFileSystem();
+
         // set up the java key provider for encrypted hdfs cluster
         hes = shims.createHdfsEncryptionShim(fs, conf);
+
         LOG.info("key provider is initialized");
       } else {
         dfs = shims.getMiniDfs(conf, numberOfDataNodes, true, null);
@@ -778,23 +776,14 @@ public class QTestUtil {
   }
 
   private void initEncryptionZone() throws IOException, NoSuchAlgorithmException, HiveException {
-    // current only aes/ctr/nopadding cipher is supported
-    conf.set(SECURITY_KEY_CIPHER_NAME, HADOOP_CIPHER_NAME);
-
-    // create encryption zone via a 128-bits key respectively for encrypted database 1
-    conf.set(SECURITY_KEY_BIT_LENGTH_PROP_NAME, "128");
-
-    hes.createKey(KEY_NAME_IN_128, conf);
+    hes.createKey(KEY_NAME_IN_128, 128);
     hes.createEncryptionZone(
       new Path(db.getDatabase(ENCRYPTED_WITH_128_BITS_KEY_DB_NAME).getLocationUri()),
       KEY_NAME_IN_128);
 
-    // create encryption zone via a 256-bits key respectively for encrypted database 2
-    conf.set(SECURITY_KEY_BIT_LENGTH_PROP_NAME, "256");
-
     // AES-256 can be used only if JCE is installed in your environment. Otherwise, any encryption
     // with this key will fail. Keys can be created, but when you try to encrypt something, fails.
-    hes.createKey(KEY_NAME_IN_256, conf);
+    hes.createKey(KEY_NAME_IN_256, 256);
     hes.createEncryptionZone(
       new Path(db.getDatabase(ENCRYPTED_WITH_256_BITS_KEY_DB_NAME).getLocationUri()),
       KEY_NAME_IN_256);
@@ -922,27 +911,121 @@ public class QTestUtil {
 
   private static final String CRLF = System.getProperty("line.separator");
   public int executeClient(String tname1, String tname2) {
-    String commands = getCommands(tname1) + CRLF + getCommands(tname2);
-    return cliDriver.processLine(commands);
+    List<String> commandList = new ArrayList<String>();
+
+    commandList.addAll(getCommands(tname1));
+    commandList.add(CRLF);
+    commandList.addAll(getCommands(tname2));
+
+    return executeClient(commandList);
   }
 
   public int executeClient(String tname) {
-    return cliDriver.processLine(getCommands(tname));
+    return executeClient(getCommands(tname));
+  }
+
+  public int executeClient(final List<String> commandList) {
+    int rc = 0;
+
+    for (String command : commandList) {
+      if (isCommandUsedForTesting(command)) {
+        rc = executeTestCommand(command);
+      } else {
+        rc = cliDriver.processLine(command);
+      }
+
+      if (rc != 0) {
+        break;
+      }
+    }
+
+    return rc;
+  }
+
+  private int executeTestCommand(final String command) {
+    String commandName = command.split("\\s+")[0];
+    String commandArgs = command.substring(commandName.length());
+
+    if (commandArgs.endsWith(";")) {
+      commandArgs = StringUtils.chop(commandArgs);
+    }
+
+    try {
+      CommandProcessor proc = getTestCommand(commandName);
+      if (proc != null) {
+        CommandProcessorResponse response = proc.run(commandArgs.trim());
+
+        int rc = response.getResponseCode();
+        if (rc != 0) {
+          SessionState.get().out.println(response);
+        }
+
+        return rc;
+      } else {
+        throw new RuntimeException("Could not get CommandProcessor for command: " + commandName);
+      }
+    } catch (Exception e) {
+      throw new RuntimeException("Could not execute test command: " + e.getMessage());
+    }
+  }
+
+  private CommandProcessor getTestCommand(final String commandName) {
+    HiveCommand testCommand = HiveCommand.find(new String[]{commandName}, HiveCommand.ONLY_FOR_TESTING);
+    if (testCommand == null) {
+      return null;
+    }
+
+    switch (testCommand) {
+      case CRYPTO:
+        if (hes == null) {
+          throw new RuntimeException("HDFS encryption is not initialized for testing.");
+        }
+
+        return new EncryptionProcessor(hes, conf);
+      default:
+        throw new IllegalArgumentException("Unknown test command: " + commandName);
+    }
+  }
+
+  private boolean isCommandUsedForTesting(final String command) {
+    String commandName = command.trim().split("\\s+")[0];
+    HiveCommand testCommand = HiveCommand.find(new String[]{commandName}, HiveCommand.ONLY_FOR_TESTING);
+    return testCommand != null;
+  }
+
+  private List<String> getCommands(final String testName) {
+    List<String> commandList = new ArrayList<String>();
+    String testCommands = qMap.get(testName);
+
+    String command = "";
+    for (String line : testCommands.split("\n")) {
+      line = line.trim();
+
+      if (StringUtils.isBlank(line) || isComment(line)) {
+        continue;
+      }
+
+      // Join multiple line commands into one line
+      if (StringUtils.endsWith(line, "\\")) {
+        command += " " + StringUtils.chop(line);
+        continue;
+      } else if (!StringUtils.endsWith(line, ";")) {
+        command += " " + line;
+        continue;
+      } else {
+        command += " " + line;
+      }
+
+      commandList.add(command.trim());
+      command = "";
+    }
+
+    return commandList;
   }
 
-  private String getCommands(String tname) {
-    String commands = qMap.get(tname);
-    StringBuilder newCommands = new StringBuilder(commands.length());
-    int lastMatchEnd = 0;
-    Matcher commentMatcher = Pattern.compile("^--.*$", Pattern.MULTILINE).matcher(commands);
-    while (commentMatcher.find()) {
-      newCommands.append(commands.substring(lastMatchEnd, commentMatcher.start()));
-      newCommands.append(commentMatcher.group().replaceAll("(?<!\\\\);", "\\\\;"));
-      lastMatchEnd = commentMatcher.end();
-    }
-    newCommands.append(commands.substring(lastMatchEnd, commands.length()));
-    commands = newCommands.toString();
-    return commands;
+  private boolean isComment(final String line) {
+    String lineTrimmed = line.trim();
+    return lineTrimmed.startsWith("#") || lineTrimmed.startsWith("--");
   }
 
   public boolean shouldBeSkipped(String tname) {

Added: hive/branches/HIVE-8065/ql/src/java/org/apache/hadoop/hive/ql/processors/EncryptionProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-8065/ql/src/java/org/apache/hadoop/hive/ql/processors/EncryptionProcessor.java?rev=1648556&view=auto
==============================================================================
--- hive/branches/HIVE-8065/ql/src/java/org/apache/hadoop/hive/ql/processors/EncryptionProcessor.java (added)
+++ hive/branches/HIVE-8065/ql/src/java/org/apache/hadoop/hive/ql/processors/EncryptionProcessor.java Tue Dec 30 17:43:09 2014
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.processors;
+
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.shims.HadoopShims;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+/**
+ * This class processes Hadoop commands used for HDFS encryption. It is meant to be run
+ * only by Hive unit and query tests.
+ */
+public class EncryptionProcessor implements CommandProcessor {
+  public static final Log LOG = LogFactory.getLog(EncryptionProcessor.class.getName());
+
+  private HadoopShims.HdfsEncryptionShim encryptionShim;
+
+  private Options CREATE_KEY_OPTIONS;
+  private Options DELETE_KEY_OPTIONS;
+  private Options CREATE_ZONE_OPTIONS;
+
+  private int DEFAULT_BIT_LENGTH = 128;
+
+  private HiveConf conf;
+
+  public EncryptionProcessor(HadoopShims.HdfsEncryptionShim encryptionShim, HiveConf conf) {
+    this.encryptionShim = encryptionShim;
+    this.conf = conf;
+
+    CREATE_KEY_OPTIONS = new Options();
+    CREATE_KEY_OPTIONS.addOption(OptionBuilder.hasArg().withLongOpt("keyName").isRequired().create());
+    CREATE_KEY_OPTIONS.addOption(OptionBuilder.hasArg().withLongOpt("bitLength").create());   // optional
+
+    DELETE_KEY_OPTIONS = new Options();
+    DELETE_KEY_OPTIONS.addOption(OptionBuilder.hasArg().withLongOpt("keyName").isRequired().create());
+
+    CREATE_ZONE_OPTIONS = new Options();
+    CREATE_ZONE_OPTIONS.addOption(OptionBuilder.hasArg().withLongOpt("keyName").isRequired().create());
+    CREATE_ZONE_OPTIONS.addOption(OptionBuilder.hasArg().withLongOpt("path").isRequired().create());
+  }
+
+  private CommandLine parseCommandArgs(final Options opts, String[] args) throws ParseException {
+    CommandLineParser parser = new GnuParser();
+    return parser.parse(opts, args);
+  }
+
+  private CommandProcessorResponse returnErrorResponse(final String errmsg) {
+    return new CommandProcessorResponse(1, "Encryption Processor Helper Failed:" + errmsg, null);
+  }
+
+  private void writeTestOutput(final String msg) {
+    SessionState.get().out.println(msg);
+  }
+
+  @Override
+  public void init() {
+  }
+
+  @Override
+  public CommandProcessorResponse run(String command) throws CommandNeedRetryException {
+    String[] args = command.split("\\s+");
+
+    if (args.length < 1) {
+      return returnErrorResponse("Command arguments are empty.");
+    }
+
+    if (encryptionShim == null) {
+      return returnErrorResponse("Hadoop encryption shim is not initialized.");
+    }
+
+    String action = args[0];
+    String params[] = Arrays.copyOfRange(args, 1, args.length);
+
+    try {
+      if (action.equalsIgnoreCase("create_key")) {
+        createEncryptionKey(params);
+      } else if (action.equalsIgnoreCase("create_zone")) {
+        createEncryptionZone(params);
+      } else if (action.equalsIgnoreCase("delete_key")) {
+        deleteEncryptionKey(params);
+      } else {
+        return returnErrorResponse("Unknown command action: " + action);
+      }
+    } catch (Exception e) {
+      return returnErrorResponse(e.getMessage());
+    }
+
+    return new CommandProcessorResponse(0);
+  }
+
+  /**
+   * Creates an encryption key using the parameters passed through the 'create_key' action.
+   *
+   * @param params Parameters passed to the 'create_key' command action.
+   * @throws Exception If key creation failed.
+   */
+  private void createEncryptionKey(String[] params) throws Exception {
+    CommandLine args = parseCommandArgs(CREATE_KEY_OPTIONS, params);
+
+    String keyName = args.getOptionValue("keyName");
+    String bitLength = args.getOptionValue("bitLength", Integer.toString(DEFAULT_BIT_LENGTH));
+
+    try {
+      encryptionShim.createKey(keyName, new Integer(bitLength));
+    } catch (Exception e) {
+      throw new Exception("Cannot create encryption key: " + e.getMessage());
+    }
+
+    writeTestOutput("Encryption key created: '" + keyName + "'");
+  }
+
+  /**
+   * Creates an encryption zone using the parameters passed through the 'create_zone' action.
+   *
+   * @param params Parameters passed to the 'create_zone' command action.
+   * @throws Exception If zone creation failed.
+   */
+  private void createEncryptionZone(String[] params) throws Exception {
+    CommandLine args = parseCommandArgs(CREATE_ZONE_OPTIONS, params);
+
+    String keyName = args.getOptionValue("keyName");
+    Path cryptoZone = new Path(args.getOptionValue("path"));
+    if (cryptoZone == null) {
+      throw new Exception("Cannot create encryption zone: Invalid path '"
+          + args.getOptionValue("path") + "'");
+    }
+
+    try {
+      encryptionShim.createEncryptionZone(cryptoZone, keyName);
+    } catch (IOException e) {
+      throw new Exception("Cannot create encryption zone: " + e.getMessage());
+    }
+
+    writeTestOutput("Encryption zone created: '" + cryptoZone + "' using key: '" + keyName + "'");
+  }
+
+  /**
+   * Deletes an encryption key using the parameters passed through the 'delete_key' action.
+   *
+   * @param params Parameters passed to the 'delete_key' command action.
+   * @throws Exception If key deletion failed.
+   */
+  private void deleteEncryptionKey(String[] params) throws Exception {
+    CommandLine args = parseCommandArgs(DELETE_KEY_OPTIONS, params);
+
+    String keyName = args.getOptionValue("keyName");
+    try {
+      encryptionShim.deleteKey(keyName);
+    } catch (IOException e) {
+      throw new Exception("Cannot delete encryption key: " + e.getMessage());
+    }
+
+    writeTestOutput("Encryption key deleted: '" + keyName + "'");
+  }
+}

Modified: hive/branches/HIVE-8065/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-8065/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java?rev=1648556&r1=1648555&r2=1648556&view=diff
==============================================================================
--- hive/branches/HIVE-8065/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java (original)
+++ hive/branches/HIVE-8065/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java Tue Dec 30 17:43:09 2014
@@ -29,18 +29,40 @@ public enum HiveCommand {
   SET(),
   RESET(),
   DFS(),
+  CRYPTO(true),
   ADD(),
   LIST(),
   RELOAD(),
   DELETE(),
   COMPILE();
+
+  public static boolean ONLY_FOR_TESTING = true;
+  private boolean usedOnlyForTesting;
+
+  HiveCommand() {
+    this(false);
+  }
+
+  HiveCommand(boolean onlyForTesting) {
+    this.usedOnlyForTesting = onlyForTesting;
+  }
+
+  public boolean isOnlyForTesting() {
+    return this.usedOnlyForTesting;
+  }
+
   private static final Set<String> COMMANDS = new HashSet<String>();
   static {
     for (HiveCommand command : HiveCommand.values()) {
       COMMANDS.add(command.name());
     }
   }
+
   public static HiveCommand find(String[] command) {
+    return find(command, false);
+  }
+
+  public static HiveCommand find(String[] command, boolean findOnlyForTesting) {
     if (null == command){
       return null;
     }
@@ -54,7 +76,13 @@ public enum HiveCommand {
         //special handling for SQL "delete from <table> where..."
         return null;
       } else if (COMMANDS.contains(cmd)) {
-        return HiveCommand.valueOf(cmd);
+        HiveCommand hiveCommand = HiveCommand.valueOf(cmd);
+
+        if (findOnlyForTesting == hiveCommand.isOnlyForTesting()) {
+          return hiveCommand;
+        }
+
+        return null;
       }
     }
     return null;

Added: hive/branches/HIVE-8065/ql/src/test/queries/clientpositive/encryption_load_data_to_encrypted_tables.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-8065/ql/src/test/queries/clientpositive/encryption_load_data_to_encrypted_tables.q?rev=1648556&view=auto
==============================================================================
--- hive/branches/HIVE-8065/ql/src/test/queries/clientpositive/encryption_load_data_to_encrypted_tables.q (added)
+++ hive/branches/HIVE-8065/ql/src/test/queries/clientpositive/encryption_load_data_to_encrypted_tables.q Tue Dec 30 17:43:09 2014
@@ -0,0 +1,19 @@
+DROP TABLE IF EXISTS encrypted_table;
+
+CREATE TABLE encrypted_table (key STRING, value STRING) LOCATION '/user/hive/warehouse/encrypted_table';
+
+crypto create_key --keyName key1;
+crypto create_zone --keyName key1 --path /user/hive/warehouse/encrypted_table;
+
+-- Test loading data from the local filesystem;
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE encrypted_table;
+SELECT * FROM encrypted_table;
+
+-- Test loading data from the hdfs filesystem;
+dfs -copyFromLocal ../../data/files/kv1.txt hdfs:///tmp/kv1.txt;
+LOAD DATA INPATH '/tmp/kv1.txt' OVERWRITE INTO TABLE encrypted_table;
+SELECT * FROM encrypted_table;
+
+DROP TABLE encrypted_table;
+
+crypto delete_key --keyName key1;
\ No newline at end of file

Added: hive/branches/HIVE-8065/ql/src/test/results/clientpositive/encrypted/encryption_load_data_to_encrypted_tables.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-8065/ql/src/test/results/clientpositive/encrypted/encryption_load_data_to_encrypted_tables.q.out?rev=1648556&view=auto
==============================================================================
--- hive/branches/HIVE-8065/ql/src/test/results/clientpositive/encrypted/encryption_load_data_to_encrypted_tables.q.out (added)
+++ hive/branches/HIVE-8065/ql/src/test/results/clientpositive/encrypted/encryption_load_data_to_encrypted_tables.q.out Tue Dec 30 17:43:09 2014
@@ -0,0 +1,1057 @@
+PREHOOK: query: DROP TABLE IF EXISTS encrypted_table
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS encrypted_table
+POSTHOOK: type: DROPTABLE
+#### A masked pattern was here ####
+PREHOOK: type: CREATETABLE
+PREHOOK: Input: /user/hive/warehouse/encrypted_table
+PREHOOK: Output: database:default
+PREHOOK: Output: default@encrypted_table
+#### A masked pattern was here ####
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Input: /user/hive/warehouse/encrypted_table
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@encrypted_table
+Encryption key created: 'key1'
+Encryption zone created: '/user/hive/warehouse/encrypted_table' using key: 'key1'
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE encrypted_table
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@encrypted_table
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' OVERWRITE INTO TABLE encrypted_table
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@encrypted_table
+PREHOOK: query: SELECT * FROM encrypted_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@encrypted_table
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM encrypted_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@encrypted_table
+#### A masked pattern was here ####
+238	val_238
+86	val_86
+311	val_311
+27	val_27
+165	val_165
+409	val_409
+255	val_255
+278	val_278
+98	val_98
+484	val_484
+265	val_265
+193	val_193
+401	val_401
+150	val_150
+273	val_273
+224	val_224
+369	val_369
+66	val_66
+128	val_128
+213	val_213
+146	val_146
+406	val_406
+429	val_429
+374	val_374
+152	val_152
+469	val_469
+145	val_145
+495	val_495
+37	val_37
+327	val_327
+281	val_281
+277	val_277
+209	val_209
+15	val_15
+82	val_82
+403	val_403
+166	val_166
+417	val_417
+430	val_430
+252	val_252
+292	val_292
+219	val_219
+287	val_287
+153	val_153
+193	val_193
+338	val_338
+446	val_446
+459	val_459
+394	val_394
+237	val_237
+482	val_482
+174	val_174
+413	val_413
+494	val_494
+207	val_207
+199	val_199
+466	val_466
+208	val_208
+174	val_174
+399	val_399
+396	val_396
+247	val_247
+417	val_417
+489	val_489
+162	val_162
+377	val_377
+397	val_397
+309	val_309
+365	val_365
+266	val_266
+439	val_439
+342	val_342
+367	val_367
+325	val_325
+167	val_167
+195	val_195
+475	val_475
+17	val_17
+113	val_113
+155	val_155
+203	val_203
+339	val_339
+0	val_0
+455	val_455
+128	val_128
+311	val_311
+316	val_316
+57	val_57
+302	val_302
+205	val_205
+149	val_149
+438	val_438
+345	val_345
+129	val_129
+170	val_170
+20	val_20
+489	val_489
+157	val_157
+378	val_378
+221	val_221
+92	val_92
+111	val_111
+47	val_47
+72	val_72
+4	val_4
+280	val_280
+35	val_35
+427	val_427
+277	val_277
+208	val_208
+356	val_356
+399	val_399
+169	val_169
+382	val_382
+498	val_498
+125	val_125
+386	val_386
+437	val_437
+469	val_469
+192	val_192
+286	val_286
+187	val_187
+176	val_176
+54	val_54
+459	val_459
+51	val_51
+138	val_138
+103	val_103
+239	val_239
+213	val_213
+216	val_216
+430	val_430
+278	val_278
+176	val_176
+289	val_289
+221	val_221
+65	val_65
+318	val_318
+332	val_332
+311	val_311
+275	val_275
+137	val_137
+241	val_241
+83	val_83
+333	val_333
+180	val_180
+284	val_284
+12	val_12
+230	val_230
+181	val_181
+67	val_67
+260	val_260
+404	val_404
+384	val_384
+489	val_489
+353	val_353
+373	val_373
+272	val_272
+138	val_138
+217	val_217
+84	val_84
+348	val_348
+466	val_466
+58	val_58
+8	val_8
+411	val_411
+230	val_230
+208	val_208
+348	val_348
+24	val_24
+463	val_463
+431	val_431
+179	val_179
+172	val_172
+42	val_42
+129	val_129
+158	val_158
+119	val_119
+496	val_496
+0	val_0
+322	val_322
+197	val_197
+468	val_468
+393	val_393
+454	val_454
+100	val_100
+298	val_298
+199	val_199
+191	val_191
+418	val_418
+96	val_96
+26	val_26
+165	val_165
+327	val_327
+230	val_230
+205	val_205
+120	val_120
+131	val_131
+51	val_51
+404	val_404
+43	val_43
+436	val_436
+156	val_156
+469	val_469
+468	val_468
+308	val_308
+95	val_95
+196	val_196
+288	val_288
+481	val_481
+457	val_457
+98	val_98
+282	val_282
+197	val_197
+187	val_187
+318	val_318
+318	val_318
+409	val_409
+470	val_470
+137	val_137
+369	val_369
+316	val_316
+169	val_169
+413	val_413
+85	val_85
+77	val_77
+0	val_0
+490	val_490
+87	val_87
+364	val_364
+179	val_179
+118	val_118
+134	val_134
+395	val_395
+282	val_282
+138	val_138
+238	val_238
+419	val_419
+15	val_15
+118	val_118
+72	val_72
+90	val_90
+307	val_307
+19	val_19
+435	val_435
+10	val_10
+277	val_277
+273	val_273
+306	val_306
+224	val_224
+309	val_309
+389	val_389
+327	val_327
+242	val_242
+369	val_369
+392	val_392
+272	val_272
+331	val_331
+401	val_401
+242	val_242
+452	val_452
+177	val_177
+226	val_226
+5	val_5
+497	val_497
+402	val_402
+396	val_396
+317	val_317
+395	val_395
+58	val_58
+35	val_35
+336	val_336
+95	val_95
+11	val_11
+168	val_168
+34	val_34
+229	val_229
+233	val_233
+143	val_143
+472	val_472
+322	val_322
+498	val_498
+160	val_160
+195	val_195
+42	val_42
+321	val_321
+430	val_430
+119	val_119
+489	val_489
+458	val_458
+78	val_78
+76	val_76
+41	val_41
+223	val_223
+492	val_492
+149	val_149
+449	val_449
+218	val_218
+228	val_228
+138	val_138
+453	val_453
+30	val_30
+209	val_209
+64	val_64
+468	val_468
+76	val_76
+74	val_74
+342	val_342
+69	val_69
+230	val_230
+33	val_33
+368	val_368
+103	val_103
+296	val_296
+113	val_113
+216	val_216
+367	val_367
+344	val_344
+167	val_167
+274	val_274
+219	val_219
+239	val_239
+485	val_485
+116	val_116
+223	val_223
+256	val_256
+263	val_263
+70	val_70
+487	val_487
+480	val_480
+401	val_401
+288	val_288
+191	val_191
+5	val_5
+244	val_244
+438	val_438
+128	val_128
+467	val_467
+432	val_432
+202	val_202
+316	val_316
+229	val_229
+469	val_469
+463	val_463
+280	val_280
+2	val_2
+35	val_35
+283	val_283
+331	val_331
+235	val_235
+80	val_80
+44	val_44
+193	val_193
+321	val_321
+335	val_335
+104	val_104
+466	val_466
+366	val_366
+175	val_175
+403	val_403
+483	val_483
+53	val_53
+105	val_105
+257	val_257
+406	val_406
+409	val_409
+190	val_190
+406	val_406
+401	val_401
+114	val_114
+258	val_258
+90	val_90
+203	val_203
+262	val_262
+348	val_348
+424	val_424
+12	val_12
+396	val_396
+201	val_201
+217	val_217
+164	val_164
+431	val_431
+454	val_454
+478	val_478
+298	val_298
+125	val_125
+431	val_431
+164	val_164
+424	val_424
+187	val_187
+382	val_382
+5	val_5
+70	val_70
+397	val_397
+480	val_480
+291	val_291
+24	val_24
+351	val_351
+255	val_255
+104	val_104
+70	val_70
+163	val_163
+438	val_438
+119	val_119
+414	val_414
+200	val_200
+491	val_491
+237	val_237
+439	val_439
+360	val_360
+248	val_248
+479	val_479
+305	val_305
+417	val_417
+199	val_199
+444	val_444
+120	val_120
+429	val_429
+169	val_169
+443	val_443
+323	val_323
+325	val_325
+277	val_277
+230	val_230
+478	val_478
+178	val_178
+468	val_468
+310	val_310
+317	val_317
+333	val_333
+493	val_493
+460	val_460
+207	val_207
+249	val_249
+265	val_265
+480	val_480
+83	val_83
+136	val_136
+353	val_353
+172	val_172
+214	val_214
+462	val_462
+233	val_233
+406	val_406
+133	val_133
+175	val_175
+189	val_189
+454	val_454
+375	val_375
+401	val_401
+421	val_421
+407	val_407
+384	val_384
+256	val_256
+26	val_26
+134	val_134
+67	val_67
+384	val_384
+379	val_379
+18	val_18
+462	val_462
+492	val_492
+100	val_100
+298	val_298
+9	val_9
+341	val_341
+498	val_498
+146	val_146
+458	val_458
+362	val_362
+186	val_186
+285	val_285
+348	val_348
+167	val_167
+18	val_18
+273	val_273
+183	val_183
+281	val_281
+344	val_344
+97	val_97
+469	val_469
+315	val_315
+84	val_84
+28	val_28
+37	val_37
+448	val_448
+152	val_152
+348	val_348
+307	val_307
+194	val_194
+414	val_414
+477	val_477
+222	val_222
+126	val_126
+90	val_90
+169	val_169
+403	val_403
+400	val_400
+200	val_200
+97	val_97
+#### A masked pattern was here ####
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@encrypted_table
+#### A masked pattern was here ####
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@encrypted_table
+PREHOOK: query: SELECT * FROM encrypted_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@encrypted_table
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM encrypted_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@encrypted_table
+#### A masked pattern was here ####
+238	val_238
+86	val_86
+311	val_311
+27	val_27
+165	val_165
+409	val_409
+255	val_255
+278	val_278
+98	val_98
+484	val_484
+265	val_265
+193	val_193
+401	val_401
+150	val_150
+273	val_273
+224	val_224
+369	val_369
+66	val_66
+128	val_128
+213	val_213
+146	val_146
+406	val_406
+429	val_429
+374	val_374
+152	val_152
+469	val_469
+145	val_145
+495	val_495
+37	val_37
+327	val_327
+281	val_281
+277	val_277
+209	val_209
+15	val_15
+82	val_82
+403	val_403
+166	val_166
+417	val_417
+430	val_430
+252	val_252
+292	val_292
+219	val_219
+287	val_287
+153	val_153
+193	val_193
+338	val_338
+446	val_446
+459	val_459
+394	val_394
+237	val_237
+482	val_482
+174	val_174
+413	val_413
+494	val_494
+207	val_207
+199	val_199
+466	val_466
+208	val_208
+174	val_174
+399	val_399
+396	val_396
+247	val_247
+417	val_417
+489	val_489
+162	val_162
+377	val_377
+397	val_397
+309	val_309
+365	val_365
+266	val_266
+439	val_439
+342	val_342
+367	val_367
+325	val_325
+167	val_167
+195	val_195
+475	val_475
+17	val_17
+113	val_113
+155	val_155
+203	val_203
+339	val_339
+0	val_0
+455	val_455
+128	val_128
+311	val_311
+316	val_316
+57	val_57
+302	val_302
+205	val_205
+149	val_149
+438	val_438
+345	val_345
+129	val_129
+170	val_170
+20	val_20
+489	val_489
+157	val_157
+378	val_378
+221	val_221
+92	val_92
+111	val_111
+47	val_47
+72	val_72
+4	val_4
+280	val_280
+35	val_35
+427	val_427
+277	val_277
+208	val_208
+356	val_356
+399	val_399
+169	val_169
+382	val_382
+498	val_498
+125	val_125
+386	val_386
+437	val_437
+469	val_469
+192	val_192
+286	val_286
+187	val_187
+176	val_176
+54	val_54
+459	val_459
+51	val_51
+138	val_138
+103	val_103
+239	val_239
+213	val_213
+216	val_216
+430	val_430
+278	val_278
+176	val_176
+289	val_289
+221	val_221
+65	val_65
+318	val_318
+332	val_332
+311	val_311
+275	val_275
+137	val_137
+241	val_241
+83	val_83
+333	val_333
+180	val_180
+284	val_284
+12	val_12
+230	val_230
+181	val_181
+67	val_67
+260	val_260
+404	val_404
+384	val_384
+489	val_489
+353	val_353
+373	val_373
+272	val_272
+138	val_138
+217	val_217
+84	val_84
+348	val_348
+466	val_466
+58	val_58
+8	val_8
+411	val_411
+230	val_230
+208	val_208
+348	val_348
+24	val_24
+463	val_463
+431	val_431
+179	val_179
+172	val_172
+42	val_42
+129	val_129
+158	val_158
+119	val_119
+496	val_496
+0	val_0
+322	val_322
+197	val_197
+468	val_468
+393	val_393
+454	val_454
+100	val_100
+298	val_298
+199	val_199
+191	val_191
+418	val_418
+96	val_96
+26	val_26
+165	val_165
+327	val_327
+230	val_230
+205	val_205
+120	val_120
+131	val_131
+51	val_51
+404	val_404
+43	val_43
+436	val_436
+156	val_156
+469	val_469
+468	val_468
+308	val_308
+95	val_95
+196	val_196
+288	val_288
+481	val_481
+457	val_457
+98	val_98
+282	val_282
+197	val_197
+187	val_187
+318	val_318
+318	val_318
+409	val_409
+470	val_470
+137	val_137
+369	val_369
+316	val_316
+169	val_169
+413	val_413
+85	val_85
+77	val_77
+0	val_0
+490	val_490
+87	val_87
+364	val_364
+179	val_179
+118	val_118
+134	val_134
+395	val_395
+282	val_282
+138	val_138
+238	val_238
+419	val_419
+15	val_15
+118	val_118
+72	val_72
+90	val_90
+307	val_307
+19	val_19
+435	val_435
+10	val_10
+277	val_277
+273	val_273
+306	val_306
+224	val_224
+309	val_309
+389	val_389
+327	val_327
+242	val_242
+369	val_369
+392	val_392
+272	val_272
+331	val_331
+401	val_401
+242	val_242
+452	val_452
+177	val_177
+226	val_226
+5	val_5
+497	val_497
+402	val_402
+396	val_396
+317	val_317
+395	val_395
+58	val_58
+35	val_35
+336	val_336
+95	val_95
+11	val_11
+168	val_168
+34	val_34
+229	val_229
+233	val_233
+143	val_143
+472	val_472
+322	val_322
+498	val_498
+160	val_160
+195	val_195
+42	val_42
+321	val_321
+430	val_430
+119	val_119
+489	val_489
+458	val_458
+78	val_78
+76	val_76
+41	val_41
+223	val_223
+492	val_492
+149	val_149
+449	val_449
+218	val_218
+228	val_228
+138	val_138
+453	val_453
+30	val_30
+209	val_209
+64	val_64
+468	val_468
+76	val_76
+74	val_74
+342	val_342
+69	val_69
+230	val_230
+33	val_33
+368	val_368
+103	val_103
+296	val_296
+113	val_113
+216	val_216
+367	val_367
+344	val_344
+167	val_167
+274	val_274
+219	val_219
+239	val_239
+485	val_485
+116	val_116
+223	val_223
+256	val_256
+263	val_263
+70	val_70
+487	val_487
+480	val_480
+401	val_401
+288	val_288
+191	val_191
+5	val_5
+244	val_244
+438	val_438
+128	val_128
+467	val_467
+432	val_432
+202	val_202
+316	val_316
+229	val_229
+469	val_469
+463	val_463
+280	val_280
+2	val_2
+35	val_35
+283	val_283
+331	val_331
+235	val_235
+80	val_80
+44	val_44
+193	val_193
+321	val_321
+335	val_335
+104	val_104
+466	val_466
+366	val_366
+175	val_175
+403	val_403
+483	val_483
+53	val_53
+105	val_105
+257	val_257
+406	val_406
+409	val_409
+190	val_190
+406	val_406
+401	val_401
+114	val_114
+258	val_258
+90	val_90
+203	val_203
+262	val_262
+348	val_348
+424	val_424
+12	val_12
+396	val_396
+201	val_201
+217	val_217
+164	val_164
+431	val_431
+454	val_454
+478	val_478
+298	val_298
+125	val_125
+431	val_431
+164	val_164
+424	val_424
+187	val_187
+382	val_382
+5	val_5
+70	val_70
+397	val_397
+480	val_480
+291	val_291
+24	val_24
+351	val_351
+255	val_255
+104	val_104
+70	val_70
+163	val_163
+438	val_438
+119	val_119
+414	val_414
+200	val_200
+491	val_491
+237	val_237
+439	val_439
+360	val_360
+248	val_248
+479	val_479
+305	val_305
+417	val_417
+199	val_199
+444	val_444
+120	val_120
+429	val_429
+169	val_169
+443	val_443
+323	val_323
+325	val_325
+277	val_277
+230	val_230
+478	val_478
+178	val_178
+468	val_468
+310	val_310
+317	val_317
+333	val_333
+493	val_493
+460	val_460
+207	val_207
+249	val_249
+265	val_265
+480	val_480
+83	val_83
+136	val_136
+353	val_353
+172	val_172
+214	val_214
+462	val_462
+233	val_233
+406	val_406
+133	val_133
+175	val_175
+189	val_189
+454	val_454
+375	val_375
+401	val_401
+421	val_421
+407	val_407
+384	val_384
+256	val_256
+26	val_26
+134	val_134
+67	val_67
+384	val_384
+379	val_379
+18	val_18
+462	val_462
+492	val_492
+100	val_100
+298	val_298
+9	val_9
+341	val_341
+498	val_498
+146	val_146
+458	val_458
+362	val_362
+186	val_186
+285	val_285
+348	val_348
+167	val_167
+18	val_18
+273	val_273
+183	val_183
+281	val_281
+344	val_344
+97	val_97
+469	val_469
+315	val_315
+84	val_84
+28	val_28
+37	val_37
+448	val_448
+152	val_152
+348	val_348
+307	val_307
+194	val_194
+414	val_414
+477	val_477
+222	val_222
+126	val_126
+90	val_90
+169	val_169
+403	val_403
+400	val_400
+200	val_200
+97	val_97
+PREHOOK: query: DROP TABLE encrypted_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@encrypted_table
+PREHOOK: Output: default@encrypted_table
+POSTHOOK: query: DROP TABLE encrypted_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@encrypted_table
+POSTHOOK: Output: default@encrypted_table
+Encryption key deleted: 'key1'

Modified: hive/branches/HIVE-8065/ql/src/test/templates/TestCliDriver.vm
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-8065/ql/src/test/templates/TestCliDriver.vm?rev=1648556&r1=1648555&r2=1648556&view=diff
==============================================================================
--- hive/branches/HIVE-8065/ql/src/test/templates/TestCliDriver.vm (original)
+++ hive/branches/HIVE-8065/ql/src/test/templates/TestCliDriver.vm Tue Dec 30 17:43:09 2014
@@ -52,7 +52,7 @@ public class $className extends TestCase
       System.err.println("Exception: " + e.getMessage());
       e.printStackTrace();
       System.err.flush();
-      fail("Unexpected exception in static initialization"+e.getMessage());
+      fail("Unexpected exception in static initialization: "+e.getMessage());
     }
   }
 

Modified: hive/branches/HIVE-8065/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-8065/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1648556&r1=1648555&r2=1648556&view=diff
==============================================================================
--- hive/branches/HIVE-8065/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/branches/HIVE-8065/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Tue Dec 30 17:43:09 2014
@@ -869,7 +869,6 @@ public class Hadoop20Shims implements Ha
     ret.put("HADOOPSPECULATIVEEXECREDUCERS", "mapred.reduce.tasks.speculative.execution");
     ret.put("MAPREDSETUPCLEANUPNEEDED", "mapred.committer.job.setup.cleanup.needed");
     ret.put("MAPREDTASKCLEANUPNEEDED", "mapreduce.job.committer.task.cleanup.needed");
-    ret.put("HADOOPSECURITYKEYPROVIDER", "hadoop.encryption.is.not.supported");
     return ret;
   }
 

Modified: hive/branches/HIVE-8065/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-8065/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java?rev=1648556&r1=1648555&r2=1648556&view=diff
==============================================================================
--- hive/branches/HIVE-8065/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java (original)
+++ hive/branches/HIVE-8065/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java Tue Dec 30 17:43:09 2014
@@ -494,7 +494,6 @@ public class Hadoop20SShims extends Hado
     ret.put("HADOOPSPECULATIVEEXECREDUCERS", "mapred.reduce.tasks.speculative.execution");
     ret.put("MAPREDSETUPCLEANUPNEEDED", "mapred.committer.job.setup.cleanup.needed");
     ret.put("MAPREDTASKCLEANUPNEEDED", "mapreduce.job.committer.task.cleanup.needed");
-    ret.put("HADOOPSECURITYKEYPROVIDER", "dfs.encryption.key.provider.uri");
     return ret;
   }
 

Modified: hive/branches/HIVE-8065/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-8065/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1648556&r1=1648555&r2=1648556&view=diff
==============================================================================
--- hive/branches/HIVE-8065/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (original)
+++ hive/branches/HIVE-8065/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Tue Dec 30 17:43:09 2014
@@ -58,6 +58,7 @@ import org.apache.hadoop.fs.permission.A
 import org.apache.hadoop.fs.permission.AclStatus;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.client.HdfsAdmin;
 import org.apache.hadoop.hdfs.protocol.EncryptionZone;
@@ -97,8 +98,6 @@ import com.google.common.collect.Iterabl
 public class Hadoop23Shims extends HadoopShimsSecure {
 
   HadoopShims.MiniDFSShim cluster = null;
-  MiniDFSCluster miniDFSCluster = null;
-  KeyProvider keyProvider;
   final boolean zeroCopy;
 
   public Hadoop23Shims() {
@@ -382,8 +381,13 @@ public class Hadoop23Shims extends Hadoo
       int numDataNodes,
       boolean format,
       String[] racks) throws IOException {
-    miniDFSCluster = new MiniDFSCluster(conf, numDataNodes, format, racks);
-    keyProvider = miniDFSCluster.getNameNode().getNamesystem().getProvider();
+    MiniDFSCluster miniDFSCluster = new MiniDFSCluster(conf, numDataNodes, format, racks);
+
+    // Need to set the client's KeyProvider to the NN's for JKS,
+    // else the updates do not get flushed properly
+    miniDFSCluster.getFileSystem().getClient().setKeyProvider(
+        miniDFSCluster.getNameNode().getNamesystem().getProvider());
+
     cluster = new MiniDFSShim(miniDFSCluster);
     return cluster;
   }
@@ -942,24 +946,26 @@ public class Hadoop23Shims extends Hadoo
   }
 
   public class HdfsEncryptionShim implements HadoopShims.HdfsEncryptionShim {
+    private final String HDFS_SECURITY_DEFAULT_CIPHER = "AES/CTR/NoPadding";
+
     /**
      * Gets information about HDFS encryption zones
      */
     private HdfsAdmin hdfsAdmin = null;
 
+    /**
+     * Used to compare encryption key strengths.
+     */
+    private KeyProvider keyProvider = null;
+
+    private Configuration conf;
+
     public HdfsEncryptionShim(URI uri, Configuration conf) throws IOException {
-      hdfsAdmin = new HdfsAdmin(uri, conf);
-      if (keyProvider == null) {
-        try {
-          // We use the first key provider found in the list of key providers. We don't know
-          // what to do with the rest, so let's skip them.
-          if (keyProvider == null) {
-            keyProvider = KeyProviderFactory.getProviders(conf).get(0);
-          }
-        } catch (Exception e) {
-          throw new IOException("Cannot create HDFS security object: ", e);
-        }
-      }
+      DistributedFileSystem dfs = (DistributedFileSystem)FileSystem.get(uri, conf);
+
+      this.conf = conf;
+      this.keyProvider = dfs.getClient().getKeyProvider();
+      this.hdfsAdmin = new HdfsAdmin(uri, conf);
     }
 
     @Override
@@ -1007,18 +1013,38 @@ public class Hadoop23Shims extends Hadoo
     }
 
     @Override
-    public void createKey(String keyName, Configuration conf)
+    public void createKey(String keyName, int bitLength)
       throws IOException, NoSuchAlgorithmException {
 
+      if (keyProvider == null) {
+        throw new IOException("HDFS security key provider is not configured on your server.");
+      }
+
       if (keyProvider.getMetadata(keyName) != null) {
-        LOG.info("key " + keyName + " has already exists");
+        LOG.info("key '" + keyName + "' already exists");
         return;
       }
-      Options options = new Options(conf);
+
+      final KeyProvider.Options options = new Options(this.conf);
+      options.setCipher(HDFS_SECURITY_DEFAULT_CIPHER);
+      options.setBitLength(bitLength);
       keyProvider.createKey(keyName, options);
       keyProvider.flush();
     }
 
+    @Override
+    public void deleteKey(String keyName) throws IOException {
+      if (keyProvider == null) {
+        throw new IOException("HDFS security key provider is not configured on your server.");
+      }
+
+      if (keyProvider.getMetadata(keyName) != null) {
+        keyProvider.deleteKey(keyName);
+      } else {
+        throw new IOException("key '" + keyName + "' does not exist.");
+      }
+    }
+
     /**
      * Compares two encryption key strengths.
      *

Modified: hive/branches/HIVE-8065/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-8065/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1648556&r1=1648555&r2=1648556&view=diff
==============================================================================
--- hive/branches/HIVE-8065/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/branches/HIVE-8065/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java Tue Dec 30 17:43:09 2014
@@ -835,9 +835,20 @@ public interface HadoopShims {
     @VisibleForTesting
     public void createEncryptionZone(Path path, String keyName) throws IOException;
 
+    /**
+     * Creates an encryption key.
+     *
+     * @param keyName Name of the key
+     * @param bitLength Key encryption length in bits (128 or 256).
+     * @throws IOException If an error occurs while creating the encryption key
+     * @throws NoSuchAlgorithmException If cipher algorithm is invalid.
+     */
     @VisibleForTesting
-    public void createKey(String keyName, Configuration conf)
+    public void createKey(String keyName, int bitLength)
       throws IOException, NoSuchAlgorithmException;
+
+    @VisibleForTesting
+    public void deleteKey(String keyName) throws IOException;
   }
 
   /**
@@ -868,9 +879,14 @@ public interface HadoopShims {
     }
 
     @Override
-    public void createKey(String keyName, Configuration conf) {
+    public void createKey(String keyName, int bitLength) {
     /* not supported */
     }
+
+    @Override
+    public void deleteKey(String keyName) throws IOException {
+    /* not supported */
+    };
   }
 
   /**