Posted to commits@hbase.apache.org by ap...@apache.org on 2013/11/26 05:22:35 UTC

svn commit: r1545536 [4/4] - in /hbase/trunk: hbase-client/src/main/java/org/apache/hadoop/hbase/ hbase-client/src/main/java/org/apache/hadoop/hbase/security/ hbase-client/src/test/java/org/apache/hadoop/hbase/security/ hbase-common/src/main/java/org/a...
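
In brief: this change threads the caller's Configuration through HFile reader construction, attaches an Encryption.Context to HFileContext, and records a wrapped per-file key in the v3 trailer. Condensed from the setup the new tests perform (KeyProviderForTesting and the "hbase" master key alias are test-only settings, not production defaults):

    Configuration conf = HBaseConfiguration.create();
    // Test-only key provider and master key alias, as configured in the new tests
    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
    conf.setInt("hfile.format.version", 3);  // encryption needs the v3 format

    // Build an encryption context with a random AES data key
    Encryption.Context cryptoContext = Encryption.newContext(conf);
    cryptoContext.setCipher(Encryption.getCipher(conf, "AES"));
    byte[] key = new byte[AES.KEY_LENGTH];
    new SecureRandom().nextBytes(key);
    cryptoContext.setKey(key);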

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java Tue Nov 26 04:22:33 2013
@@ -46,8 +46,8 @@ public class WALCellCodec implements Cod
   /** Configuration key for the class to use when encoding cells in the WAL */
   public static final String WAL_CELL_CODEC_CLASS_KEY = "hbase.regionserver.wal.codec";
 
-  private final CompressionContext compression;
-  private final ByteStringUncompressor statelessUncompressor = new ByteStringUncompressor() {
+  protected final CompressionContext compression;
+  protected final ByteStringUncompressor statelessUncompressor = new ByteStringUncompressor() {
     @Override
     public byte[] uncompress(ByteString data, Dictionary dict) throws IOException {
       return WALCellCodec.uncompressByteString(data, dict);
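
These members go from private to protected so that WAL codec subclasses (presumably the secure/encrypting codec added elsewhere in this changeset) can reuse the compression state. A hypothetical fragment, for illustration only:

    // Illustrative only; constructor plumbing elided.
    public class EncryptingWALCellCodec extends WALCellCodec {
      boolean hasDictionaryCompression() {
        return compression != null;  // accessible now that the field is protected
      }
    }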

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WriterBase.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WriterBase.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WriterBase.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WriterBase.java Tue Nov 26 04:22:33 2013
@@ -22,6 +22,7 @@ import java.io.IOException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.io.util.LRUDictionary;
@@ -35,6 +36,12 @@ import org.apache.hadoop.hbase.util.FSUt
 public abstract class WriterBase implements HLog.Writer {
 
   protected CompressionContext compressionContext;
+  protected Configuration conf;
+
+  @Override
+  public void init(FileSystem fs, Path path, Configuration conf, boolean overwritable) throws IOException {
+    this.conf = conf;
+  }
 
   public boolean initializeCompressionContext(Configuration conf, Path path) throws IOException {
     boolean doCompress = conf.getBoolean(HConstants.ENABLE_WAL_COMPRESSION, false);
@@ -48,4 +55,5 @@ public abstract class WriterBase impleme
     }
     return doCompress;
   }
+
 }
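
WriterBase gains a default init that stashes the Configuration, so concrete writers should call up to it before doing their own setup. A minimal sketch, with a hypothetical subclass name and the remaining HLog.Writer methods elided:

    public class ExampleLogWriter extends WriterBase {
      @Override
      public void init(FileSystem fs, Path path, Configuration conf,
          boolean overwritable) throws IOException {
        super.init(fs, path, conf, overwritable);   // keeps conf on the base class
        initializeCompressionContext(conf, path);   // optional WAL compression
        // ... open the underlying output here ...
      }
      // append()/sync()/close()/getLength() elided
    }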

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java Tue Nov 26 04:22:33 2013
@@ -123,7 +123,7 @@ public class CompressionTest {
     writer.appendFileInfo(Bytes.toBytes("infokey"), Bytes.toBytes("infoval"));
     writer.close();
 
-    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf));
+    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), conf);
     reader.loadFileInfo();
     byte[] key = reader.getFirstKey();
     boolean rc = Bytes.toString(key).equals("testkey");
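
This is the mechanical change repeated throughout the rest of the patch: HFile.createReader (and createReaderFromStream) now take the caller's Configuration, which the reader needs to resolve the key provider and unwrap any encryption key stored in the trailer. Old versus new call shape:

    // Before
    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf));
    // After
    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), conf);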

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java Tue Nov 26 04:22:33 2013
@@ -540,7 +540,7 @@ public class HBaseFsck extends Configure
         HFile.Reader hf = null;
         try {
           CacheConfig cacheConf = new CacheConfig(getConf());
-          hf = HFile.createReader(fs, hfile.getPath(), cacheConf);
+          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, getConf());
           hf.loadFileInfo();
           KeyValue startKv = KeyValue.createKeyValueFromKey(hf.getFirstKey());
           start = startKv.getRow();

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java Tue Nov 26 04:22:33 2013
@@ -91,7 +91,7 @@ public class HFileCorruptionChecker {
   protected void checkHFile(Path p) throws IOException {
     HFile.Reader r = null;
     try {
-      r = HFile.createReader(fs, p, cacheConf);
+      r = HFile.createReader(fs, p, cacheConf, conf);
     } catch (CorruptHFileException che) {
       LOG.warn("Found corrupt HFile " + p, che);
       corrupted.add(p);

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java Tue Nov 26 04:22:33 2013
@@ -231,7 +231,7 @@ public class HFilePerformanceEvaluation 
 
     @Override
     void setUp() throws Exception {
-      reader = HFile.createReader(this.fs, this.mf, new CacheConfig(this.conf));
+      reader = HFile.createReader(this.fs, this.mf, new CacheConfig(this.conf), this.conf);
       this.reader.loadFileInfo();
     }
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java Tue Nov 26 04:22:33 2013
@@ -96,7 +96,7 @@ public class TestHalfStoreFileReader {
     }
     w.close();
 
-    HFile.Reader r = HFile.createReader(fs, p, cacheConf);
+    HFile.Reader r = HFile.createReader(fs, p, cacheConf, conf);
     r.loadFileInfo();
     byte [] midkey = r.midkey();
     KeyValue midKV = KeyValue.createKeyValueFromKey(midkey);
@@ -117,7 +117,7 @@ public class TestHalfStoreFileReader {
       CacheConfig cacheConf)
       throws IOException {
     final HalfStoreFileReader halfreader = new HalfStoreFileReader(fs, p,
-      cacheConf, bottom);
+      cacheConf, bottom, TEST_UTIL.getConfiguration());
     halfreader.loadFileInfo();
     final HFileScanner scanner = halfreader.getScanner(false, false);
 
@@ -162,7 +162,7 @@ public class TestHalfStoreFileReader {
       w.close();
 
 
-      HFile.Reader r = HFile.createReader(fs, p, cacheConf);
+      HFile.Reader r = HFile.createReader(fs, p, cacheConf, conf);
       r.loadFileInfo();
       byte[] midkey = r.midkey();
       KeyValue midKV = KeyValue.createKeyValueFromKey(midkey);
@@ -218,7 +218,7 @@ public class TestHalfStoreFileReader {
                                         CacheConfig cacheConfig)
             throws IOException {
       final HalfStoreFileReader halfreader = new HalfStoreFileReader(fs, p,
-              cacheConfig, bottom);
+              cacheConfig, bottom, TEST_UTIL.getConfiguration());
       halfreader.loadFileInfo();
       final HFileScanner scanner = halfreader.getScanner(false, false);
       scanner.seekBefore(seekBefore.getKey());

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/RandomSeek.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/RandomSeek.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/RandomSeek.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/RandomSeek.java Tue Nov 26 04:22:33 2013
@@ -69,7 +69,7 @@ public class RandomSeek {
     CacheConfig cacheConf = new CacheConfig(cache, true, false, false, false,
         false, false, false);
 
-    Reader reader = HFile.createReader(lfs, path, cacheConf);
+    Reader reader = HFile.createReader(lfs, path, cacheConf, conf);
     reader.loadFileInfo();
     System.out.println(reader.getTrailer());
     long end = System.currentTimeMillis();

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java Tue Nov 26 04:22:33 2013
@@ -219,9 +219,9 @@ public class TestCacheOnWrite {
   private void readStoreFile(boolean useTags) throws IOException {
     AbstractHFileReader reader;
     if (useTags) {
-      reader = (HFileReaderV3) HFile.createReader(fs, storeFilePath, cacheConf);
+      reader = (HFileReaderV3) HFile.createReader(fs, storeFilePath, cacheConf, conf);
    } else {
-      reader = (HFileReaderV2) HFile.createReader(fs, storeFilePath, cacheConf);
+      reader = (HFileReaderV2) HFile.createReader(fs, storeFilePath, cacheConf, conf);
     }
     LOG.info("HFile information: " + reader);
     final boolean cacheBlocks = false;

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java Tue Nov 26 04:22:33 2013
@@ -56,7 +56,7 @@ public class TestFixedFileTrailer {
    * The number of used fields by version. Indexed by version minus two. 
    * Min version that we support is V2
    */
-  private static final int[] NUM_FIELDS_BY_VERSION = new int[] { 14, 14 };
+  private static final int[] NUM_FIELDS_BY_VERSION = new int[] { 14, 15 };
 
   private HBaseTestingUtility util = new HBaseTestingUtility();
   private FileSystem fs;
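
The v3 trailer grows from 14 to 15 fields because it now carries the wrapped per-file encryption key. A reader-side probe, per the new tests (the key is absent on unencrypted files):

    HFile.Reader reader = HFile.createReader(fs, path, cacheConf, conf);
    reader.loadFileInfo();
    byte[] wrappedKey = reader.getTrailer().getEncryptionKey();
    // non-null only when the file was written with an encryption context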

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java Tue Nov 26 04:22:33 2013
@@ -87,7 +87,7 @@ public class TestHFile extends HBaseTest
     Writer w =
         HFile.getWriterFactory(conf, cacheConf).withPath(fs, f).withFileContext(context).create();
     w.close();
-    Reader r = HFile.createReader(fs, f, cacheConf);
+    Reader r = HFile.createReader(fs, f, cacheConf, conf);
     r.loadFileInfo();
     assertNull(r.getFirstKey());
     assertNull(r.getLastKey());
@@ -103,7 +103,7 @@ public class TestHFile extends HBaseTest
     fsos.close();
 
     try {
-      Reader r = HFile.createReader(fs, f, cacheConf);
+      Reader r = HFile.createReader(fs, f, cacheConf, conf);
     } catch (CorruptHFileException che) {
       // Expected failure
       return;
@@ -142,7 +142,7 @@ public class TestHFile extends HBaseTest
     truncateFile(fs, w.getPath(), trunc);
 
     try {
-      Reader r = HFile.createReader(fs, trunc, cacheConf);
+      Reader r = HFile.createReader(fs, trunc, cacheConf, conf);
     } catch (CorruptHFileException che) {
       // Expected failure
       return;
@@ -238,7 +238,7 @@ public class TestHFile extends HBaseTest
     fout.close();
     FSDataInputStream fin = fs.open(ncTFile);
     Reader reader = HFile.createReaderFromStream(ncTFile, fs.open(ncTFile),
-      fs.getFileStatus(ncTFile).getLen(), cacheConf);
+      fs.getFileStatus(ncTFile).getLen(), cacheConf, conf);
     System.out.println(cacheConf.toString());
     // Load up the index.
     reader.loadFileInfo();
@@ -325,7 +325,7 @@ public class TestHFile extends HBaseTest
     fout.close();
     FSDataInputStream fin = fs.open(mFile);
     Reader reader = HFile.createReaderFromStream(mFile, fs.open(mFile),
-        this.fs.getFileStatus(mFile).getLen(), cacheConf);
+        this.fs.getFileStatus(mFile).getLen(), cacheConf, conf);
     reader.loadFileInfo();
     // No data -- this should return false.
     assertFalse(reader.getScanner(false, false).seekTo());
@@ -356,7 +356,7 @@ public class TestHFile extends HBaseTest
       writer.append("foo".getBytes(), "value".getBytes());
       writer.close();
       fout.close();
-      Reader reader = HFile.createReader(fs, mFile, cacheConf);
+      Reader reader = HFile.createReader(fs, mFile, cacheConf, conf);
       reader.loadFileInfo();
       assertNull(reader.getMetaBlock("non-existant", false));
     }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java Tue Nov 26 04:22:33 2013
@@ -552,7 +552,7 @@ public class TestHFileBlockIndex {
       }
 
       // Read the HFile
-      HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf);
+      HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, conf);
       assertEquals(expectedNumLevels,
           reader.getTrailer().getNumDataIndexLevels());
 

Added: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java?rev=1545536&view=auto
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java (added)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java Tue Nov 26 04:22:33 2013
@@ -0,0 +1,242 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.io.hfile;
+
+import static org.junit.Assert.*;
+
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.security.SecureRandom;
+import java.util.List;
+import java.util.UUID;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.crypto.Cipher;
+import org.apache.hadoop.hbase.io.crypto.Encryption;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.test.RedundantKVGenerator;
+
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(SmallTests.class)
+public class TestHFileEncryption {
+  private static final Log LOG = LogFactory.getLog(TestHFileEncryption.class);
+  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+  private static final SecureRandom RNG = new SecureRandom();
+
+  private static FileSystem fs;
+  private static Encryption.Context cryptoContext;
+
+  @BeforeClass
+  public static void setUp() throws Exception {
+    Configuration conf = TEST_UTIL.getConfiguration();
+    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
+    conf.setInt("hfile.format.version", 3);
+
+    fs = FileSystem.get(conf);
+
+    cryptoContext = Encryption.newContext(conf);
+    Cipher aes = Encryption.getCipher(conf, "AES");
+    assertNotNull(aes);
+    cryptoContext.setCipher(aes);
+    byte[] key = new byte[aes.getKeyLength()];
+    RNG.nextBytes(key);
+    cryptoContext.setKey(key);
+  }
+
+  private int writeBlock(FSDataOutputStream os, HFileContext fileContext, int size)
+      throws IOException {
+    HFileBlock.Writer hbw = new HFileBlock.Writer(null, fileContext);
+    DataOutputStream dos = hbw.startWriting(BlockType.DATA);
+    for (int j = 0; j < size; j++) {
+      dos.writeInt(j);
+    }
+    hbw.writeHeaderAndData(os);
+    LOG.info("Wrote a block at " + os.getPos() + " with" +
+        " onDiskSizeWithHeader=" + hbw.getOnDiskSizeWithHeader() +
+        " uncompressedSizeWithoutHeader=" + hbw.getOnDiskSizeWithoutHeader() +
+        " uncompressedSizeWithoutHeader=" + hbw.getUncompressedSizeWithoutHeader());
+    return hbw.getOnDiskSizeWithHeader();
+  }
+
+  private long readAndVerifyBlock(long pos, HFileBlock.FSReaderV2 hbr, int size)
+      throws IOException {
+    HFileBlock b = hbr.readBlockData(pos, -1, -1, false);
+    assertEquals(0, HFile.getChecksumFailuresCount());
+    b.sanityCheck();
+    LOG.info("Read a block at " + pos + " with" +
+        " onDiskSizeWithHeader=" + b.getOnDiskSizeWithHeader() +
+        " uncompressedSizeWithoutHeader=" + b.getOnDiskSizeWithoutHeader() +
+        " uncompressedSizeWithoutHeader=" + b.getUncompressedSizeWithoutHeader());
+    DataInputStream dis = b.getByteStream();
+    for (int i = 0; i < size; i++) {
+      int read = dis.readInt();
+      if (read != i) {
+        fail("Block data corrupt at element " + i);
+      }
+    }
+    return b.getOnDiskSizeWithHeader();
+  }
+
+  @Test(timeout=20000)
+  public void testDataBlockEncryption() throws IOException {
+    final int blocks = 10;
+    final int[] blockSizes = new int[blocks];
+    for (int i = 0; i < blocks; i++) {
+      blockSizes[i] = (1024 + RNG.nextInt(1024 * 63)) / Bytes.SIZEOF_INT;
+    }
+    for (Compression.Algorithm compression : TestHFileBlock.COMPRESSION_ALGORITHMS) {
+      Path path = new Path(TEST_UTIL.getDataTestDir(), "block_v3_" + compression + "_AES");
+      LOG.info("testDataBlockEncryption: encryption=AES compression=" + compression);
+      long totalSize = 0;
+      HFileContext fileContext = new HFileContextBuilder()
+        .withCompression(compression)
+        .withEncryptionContext(cryptoContext)
+        .build();
+      FSDataOutputStream os = fs.create(path);
+      try {
+        for (int i = 0; i < blocks; i++) {
+          totalSize += writeBlock(os, fileContext, blockSizes[i]);
+        }
+      } finally {
+        os.close();
+      }
+      FSDataInputStream is = fs.open(path);
+      try {
+        HFileBlock.FSReaderV2 hbr = new HFileBlock.FSReaderV2(is, totalSize, fileContext);
+        long pos = 0;
+        for (int i = 0; i < blocks; i++) {
+          pos += readAndVerifyBlock(pos, hbr, blockSizes[i]);
+        }
+      } finally {
+        is.close();
+      }
+    }
+  }
+
+  @Test(timeout=20000)
+  public void testHFileEncryptionMetadata() throws Exception {
+    Configuration conf = TEST_UTIL.getConfiguration();
+    CacheConfig cacheConf = new CacheConfig(conf);
+
+    HFileContext fileContext = new HFileContext();
+    fileContext.setEncryptionContext(cryptoContext);
+
+    // write a simple encrypted hfile
+    Path path = new Path(TEST_UTIL.getDataTestDir(), "cryptometa.hfile");
+    FSDataOutputStream out = fs.create(path);
+    HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf)
+      .withOutputStream(out)
+      .withFileContext(fileContext)
+      .create();
+    writer.append("foo".getBytes(), "value".getBytes());
+    writer.close();
+    out.close();
+
+    // read it back in and validate correct crypto metadata
+    HFile.Reader reader = HFile.createReader(fs, path, cacheConf, conf);
+    reader.loadFileInfo();
+    FixedFileTrailer trailer = reader.getTrailer();
+    assertNotNull(trailer.getEncryptionKey());
+    Encryption.Context readerContext = reader.getFileContext().getEncryptionContext();
+    assertEquals(readerContext.getCipher().getName(), cryptoContext.getCipher().getName());
+    assertTrue(Bytes.equals(readerContext.getKeyBytes(),
+      cryptoContext.getKeyBytes()));
+  }
+
+  @Test(timeout=60000)
+  public void testHFileEncryption() throws Exception {
+    // Create 1000 random test KVs
+    RedundantKVGenerator generator = new RedundantKVGenerator();
+    List<KeyValue> testKvs = generator.generateTestKeyValues(1000);
+
+    // Iterate through data block encoding and compression combinations
+    Configuration conf = TEST_UTIL.getConfiguration();
+    CacheConfig cacheConf = new CacheConfig(conf);
+    for (DataBlockEncoding encoding: DataBlockEncoding.values()) {
+      for (Compression.Algorithm compression: TestHFileBlock.COMPRESSION_ALGORITHMS) {
+        HFileContext fileContext = new HFileContextBuilder()
+          .withBlockSize(4096) // small blocks
+          .withEncryptionContext(cryptoContext)
+          .withCompression(compression)
+          .withDataBlockEncoding(encoding)
+          .build();
+        // write a new test HFile
+        LOG.info("Writing with " + fileContext);
+        Path path = new Path(TEST_UTIL.getDataTestDir(), UUID.randomUUID().toString() + ".hfile");
+        FSDataOutputStream out = fs.create(path);
+        HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf)
+          .withOutputStream(out)
+          .withFileContext(fileContext)
+          .create();
+        for (KeyValue kv: testKvs) {
+          writer.append(kv);
+        }
+        writer.close();
+        out.close();
+
+        // read it back in
+        LOG.info("Reading with " + fileContext);
+        HFile.Reader reader = HFile.createReader(fs, path, cacheConf, conf);
+        reader.loadFileInfo();
+        FixedFileTrailer trailer = reader.getTrailer();
+        assertNotNull(trailer.getEncryptionKey());
+        HFileScanner scanner = reader.getScanner(false, false);
+        assertTrue("Initial seekTo failed", scanner.seekTo());
+        int i = 0;
+        do {
+          KeyValue kv = scanner.getKeyValue();
+          assertTrue("Read back an unexpected or invalid KV", testKvs.contains(kv));
+          i++;
+        } while (scanner.next());
+        reader.close();
+
+        assertEquals("Did not read back as many KVs as written", i, testKvs.size());
+
+        // Test random seeks with pread
+        LOG.info("Random seeking with " + fileContext);
+        reader = HFile.createReader(fs, path, cacheConf, conf);
+        scanner = reader.getScanner(false, true);
+        assertTrue("Initial seekTo failed", scanner.seekTo());
+        for (i = 0; i < 100; i++) {
+          KeyValue kv = testKvs.get(RNG.nextInt(testKvs.size()));
+          assertEquals("Unable to find KV as expected: " + kv, scanner.seekTo(kv.getKey()), 0);
+        }
+        reader.close();
+      }
+    }
+  }
+
+}

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java Tue Nov 26 04:22:33 2013
@@ -77,7 +77,7 @@ public class TestHFileInlineToRootChunkC
     }
     hfw.close();
 
-    HFileReaderV2 reader = (HFileReaderV2) HFile.createReader(fs, hfPath, cacheConf);
+    HFileReaderV2 reader = (HFileReaderV2) HFile.createReader(fs, hfPath, cacheConf, conf);
     HFileScanner scanner = reader.getScanner(true, true);
     for (int i = 0; i < keys.size(); ++i) {
       scanner.seekTo(keys.get(i));

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java Tue Nov 26 04:22:33 2013
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.io.hfile
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.security.SecureRandom;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.Random;
@@ -31,7 +32,12 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.io.crypto.Encryption;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.aes.AES;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.SequenceFile;
@@ -63,6 +69,8 @@ public class TestHFilePerformance extend
     } catch (IOException e) {
       throw new RuntimeException(e);
     }
+    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
     formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
     TEST_UTIL = new HBaseTestingUtility(conf);
     ROOT_DIR = TEST_UTIL.getDataTestDir("TestHFilePerformance").toString();
@@ -138,6 +146,7 @@ public class TestHFilePerformance extend
    * @param keyLength
    * @param valueLength
    * @param codecName "none", "lzo", "gz", "snappy"
+   * @param cipherName "none", "aes"
    * @param rows number of rows to be written.
    * @param writeMethod used for HFile only.
    * @param minBlockSize used for HFile only.
@@ -145,10 +154,11 @@ public class TestHFilePerformance extend
    */
    //TODO writeMethod: implement multiple ways of writing e.g. A) known length (no chunk) B) using a buffer and streaming (for many chunks).
   public void timeWrite(String fileType, int keyLength, int valueLength,
-    String codecName, long rows, String writeMethod, int minBlockSize)
+    String codecName, String cipherName, long rows, String writeMethod, int minBlockSize)
   throws IOException {
     System.out.println("File Type: " + fileType);
-    System.out.println("Writing " + fileType + " with codecName: " + codecName);
+    System.out.println("Writing " + fileType + " with codecName: " + codecName +
+      " cipherName: " + cipherName);
     long totalBytesWritten = 0;
 
 
@@ -164,13 +174,22 @@ public class TestHFilePerformance extend
     FSDataOutputStream fout =  createFSOutput(path);
 
     if ("HFile".equals(fileType)){
-        HFileContext meta = new HFileContextBuilder()
-                            .withCompression(AbstractHFileWriter.compressionByName(codecName))
-                            .withBlockSize(minBlockSize).build();
+        HFileContextBuilder builder = new HFileContextBuilder()
+            .withCompression(AbstractHFileWriter.compressionByName(codecName))
+            .withBlockSize(minBlockSize);
+        if (!"none".equals(cipherName)) {
+          byte[] cipherKey = new byte[AES.KEY_LENGTH];
+          new SecureRandom().nextBytes(cipherKey);
+          builder.withEncryptionContext(
+            Encryption.newContext(conf)
+              .setCipher(Encryption.getCipher(conf, cipherName))
+              .setKey(cipherKey));
+        }
+        HFileContext context = builder.build();
         System.out.println("HFile write method: ");
         HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
             .withOutputStream(fout)
-            .withFileContext(meta)
+            .withFileContext(context)
             .withComparator(new KeyValue.RawBytesComparator())
             .create();
 
@@ -251,7 +270,7 @@ public class TestHFilePerformance extend
 
     if ("HFile".equals(fileType)){
         HFile.Reader reader = HFile.createReaderFromStream(path, fs.open(path),
-          fs.getFileStatus(path).getLen(), new CacheConfig(conf));
+          fs.getFileStatus(path).getLen(), new CacheConfig(conf), conf);
         reader.loadFileInfo();
         switch (method) {
 
@@ -326,7 +345,7 @@ public class TestHFilePerformance extend
 
     System.out.println("****************************** Sequence File *****************************");
 
-    timeWrite("SequenceFile", keyLength, valueLength, "none", rows, null, minBlockSize);
+    timeWrite("SequenceFile", keyLength, valueLength, "none", "none", rows, null, minBlockSize);
     System.out.println("\n+++++++\n");
     timeReading("SequenceFile", keyLength, valueLength, rows, -1);
 
@@ -346,7 +365,7 @@ public class TestHFilePerformance extend
     /* Sequence file can only use native hadoop libs gzipping so commenting out.
      */
     try {
-      timeWrite("SequenceFile", keyLength, valueLength, "gz", rows, null,
+      timeWrite("SequenceFile", keyLength, valueLength, "gz", "none", rows, null,
         minBlockSize);
       System.out.println("\n+++++++\n");
       timeReading("SequenceFile", keyLength, valueLength, rows, -1);
@@ -358,13 +377,22 @@ public class TestHFilePerformance extend
     System.out.println("\n\n\n");
     System.out.println("****************************** HFile *****************************");
 
-    timeWrite("HFile", keyLength, valueLength, "none", rows, null, minBlockSize);
+    timeWrite("HFile", keyLength, valueLength, "none", "none", rows, null, minBlockSize);
     System.out.println("\n+++++++\n");
     timeReading("HFile", keyLength, valueLength, rows, 0 );
 
     System.out.println("");
     System.out.println("----------------------");
     System.out.println("");
+
+    timeWrite("HFile", keyLength, valueLength, "none", "aes", rows, null, minBlockSize);
+    System.out.println("\n+++++++\n");
+    timeReading("HFile", keyLength, valueLength, rows, 0 );
+
+    System.out.println("");
+    System.out.println("----------------------");
+    System.out.println("");
+
 /* DISABLED LZO
     timeWrite("HFile", keyLength, valueLength, "lzo", rows, null, minBlockSize);
     System.out.println("\n+++++++\n");
@@ -378,7 +406,16 @@ public class TestHFilePerformance extend
     System.out.println("----------------------");
     System.out.println("");
 */
-    timeWrite("HFile", keyLength, valueLength, "gz", rows, null, minBlockSize);
+
+    timeWrite("HFile", keyLength, valueLength, "gz", "none", rows, null, minBlockSize);
+    System.out.println("\n+++++++\n");
+    timeReading("HFile", keyLength, valueLength, rows, 0 );
+
+    System.out.println("");
+    System.out.println("----------------------");
+    System.out.println("");
+
+    timeWrite("HFile", keyLength, valueLength, "gz", "aes", rows, null, minBlockSize);
     System.out.println("\n+++++++\n");
     timeReading("HFile", keyLength, valueLength, rows, 0 );
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java Tue Nov 26 04:22:33 2013
@@ -180,7 +180,7 @@ public class TestHFileSeek extends TestC
     long totalBytes = 0;
     FSDataInputStream fsdis = fs.open(path);
     Reader reader = HFile.createReaderFromStream(path, fsdis,
-        fs.getFileStatus(path).getLen(), new CacheConfig(conf));
+        fs.getFileStatus(path).getLen(), new CacheConfig(conf), conf);
     reader.loadFileInfo();
     KeySampler kSampler =
         new KeySampler(rng, reader.getFirstKey(), reader.getLastKey(),

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java Tue Nov 26 04:22:33 2013
@@ -94,7 +94,7 @@ public class TestReseekTo {
     fout.close();
 
     HFile.Reader reader = HFile.createReader(TEST_UTIL.getTestFileSystem(),
-        ncTFile, cacheConf);
+        ncTFile, cacheConf, TEST_UTIL.getConfiguration());
     reader.loadFileInfo();
     HFileScanner scanner = reader.getScanner(false, true);
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java Tue Nov 26 04:22:33 2013
@@ -104,7 +104,7 @@ public class TestSeekTo extends HBaseTes
 
   protected void testSeekBeforeInternals(TagUsage tagUsage) throws IOException {
     Path p = makeNewFile(tagUsage);
-    HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf));
+    HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), conf);
     reader.loadFileInfo();
     HFileScanner scanner = reader.getScanner(false, true);
     assertEquals(false, scanner.seekBefore(toKV("a", tagUsage).getKey()));
@@ -145,7 +145,7 @@ public class TestSeekTo extends HBaseTes
 
   protected void testSeekBeforeWithReSeekToInternals(TagUsage tagUsage) throws IOException {
     Path p = makeNewFile(tagUsage);
-    HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf));
+    HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), conf);
     reader.loadFileInfo();
     HFileScanner scanner = reader.getScanner(false, true);
     assertEquals(false, scanner.seekBefore(toKV("a", tagUsage).getKey()));
@@ -234,7 +234,7 @@ public class TestSeekTo extends HBaseTes
 
   protected void testSeekToInternals(TagUsage tagUsage) throws IOException {
     Path p = makeNewFile(tagUsage);
-    HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf));
+    HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), conf);
     reader.loadFileInfo();
     assertEquals(2, reader.getDataBlockIndexReader().getRootBlockCount());
     HFileScanner scanner = reader.getScanner(false, true);
@@ -261,7 +261,7 @@ public class TestSeekTo extends HBaseTes
 
   protected void testBlockContainingKeyInternals(TagUsage tagUsage) throws IOException {
     Path p = makeNewFile(tagUsage);
-    HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf));
+    HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), conf);
     reader.loadFileInfo();
     HFileBlockIndex.BlockIndexReader blockIndexReader = 
       reader.getDataBlockIndexReader();

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java Tue Nov 26 04:22:33 2013
@@ -268,7 +268,7 @@ public class TestHFileOutputFormat  {
 
       // open as HFile Reader and pull out TIMERANGE FileInfo.
       HFile.Reader rd = HFile.createReader(fs, file[0].getPath(),
-          new CacheConfig(conf));
+          new CacheConfig(conf), conf);
       Map<byte[],byte[]> finfo = rd.loadFileInfo();
       byte[] range = finfo.get("TIMERANGE".getBytes());
       assertNotNull(range);
@@ -602,7 +602,7 @@ public class TestHFileOutputFormat  {
         // verify that the compression on this file matches the configured
         // compression
         Path dataFilePath = fs.listStatus(f.getPath())[0].getPath();
-        Reader reader = HFile.createReader(fs, dataFilePath, new CacheConfig(conf));
+        Reader reader = HFile.createReader(fs, dataFilePath, new CacheConfig(conf), conf);
         Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
 
         byte[] bloomFilter = fileInfo.get(StoreFile.BLOOM_FILTER_TYPE_KEY);
@@ -611,7 +611,7 @@ public class TestHFileOutputFormat  {
           "(reader: " + reader + ")",
           hcd.getBloomFilterType(), BloomType.valueOf(Bytes.toString(bloomFilter)));
         assertEquals("Incorrect compression used for column family " + familyStr +
-          "(reader: " + reader + ")", hcd.getCompression(), reader.getCompressionAlgorithm());
+          "(reader: " + reader + ")", hcd.getCompression(), reader.getFileContext().getCompression());
       }
     } finally {
       dir.getFileSystem(conf).delete(dir, true);
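
Related API shift: reader-level metadata accessors move behind HFileContext, so compression is now read from the file context:

    // Before
    Compression.Algorithm algo = reader.getCompressionAlgorithm();
    // After
    Compression.Algorithm algo = reader.getFileContext().getCompression();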

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java Tue Nov 26 04:22:33 2013
@@ -241,9 +241,9 @@ public class TestLoadIncrementalHFiles {
   }
 
   private int verifyHFile(Path p) throws IOException {
-    Configuration configuration = util.getConfiguration();
+    Configuration conf = util.getConfiguration();
     HFile.Reader reader = HFile.createReader(
-        p.getFileSystem(configuration), p, new CacheConfig(configuration));
+        p.getFileSystem(conf), p, new CacheConfig(conf), conf);
     reader.loadFileInfo();
     HFileScanner scanner = reader.getScanner(false, false);
     scanner.seekTo();

Added: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java?rev=1545536&view=auto
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java (added)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java Tue Nov 26 04:22:33 2013
@@ -0,0 +1,234 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import static org.junit.Assert.*;
+
+import java.security.Key;
+import java.security.SecureRandom;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.crypto.spec.SecretKeySpec;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Waiter.Predicate;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.io.crypto.Encryption;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.aes.AES;
+import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.io.hfile.HFile;
+import org.apache.hadoop.hbase.security.EncryptionUtil;
+import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(MediumTests.class)
+public class TestEncryptionKeyRotation {
+  private static final Log LOG = LogFactory.getLog(TestEncryptionKeyRotation.class);
+  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+  private static final Configuration conf = TEST_UTIL.getConfiguration();
+  private static final Key initialCFKey;
+  private static final Key secondCFKey;
+  static {
+    // Create the test encryption keys
+    SecureRandom rng = new SecureRandom();
+    byte[] keyBytes = new byte[AES.KEY_LENGTH];
+    rng.nextBytes(keyBytes);
+    initialCFKey = new SecretKeySpec(keyBytes, "AES");
+    rng.nextBytes(keyBytes);
+    secondCFKey = new SecretKeySpec(keyBytes, "AES");
+  }
+
+  @BeforeClass
+  public static void setUp() throws Exception {
+    conf.setInt("hfile.format.version", 3);
+    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
+    // Enable online schema updates
+    conf.setBoolean("hbase.online.schema.update.enable", true);
+
+    // Start the minicluster
+    TEST_UTIL.startMiniCluster(1);
+  }
+
+  @AfterClass
+  public static void tearDown() throws Exception {
+    TEST_UTIL.shutdownMiniCluster();
+  }
+
+  @Test
+  public void testCFKeyRotation() throws Exception {
+    // Create the table schema
+    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("default",
+      "testCFKeyRotation"));
+    HColumnDescriptor hcd = new HColumnDescriptor("cf");
+    hcd.setEncryptionType("AES");
+    hcd.setEncryptionKey(EncryptionUtil.wrapKey(conf, "hbase", initialCFKey));
+    htd.addFamily(hcd);
+
+    // Create the table and some on disk files
+    createTableAndFlush(htd);
+
+    // Verify we have store file(s) with the initial key
+    final List<Path> initialPaths = findStorefilePaths(htd.getTableName());
+    assertTrue(initialPaths.size() > 0);
+    for (Path path: initialPaths) {
+      assertTrue("Store file " + path + " has incorrect key",
+        Bytes.equals(initialCFKey.getEncoded(), extractHFileKey(path)));
+    }
+
+    // Update the schema with a new encryption key
+    hcd = htd.getFamily(Bytes.toBytes("cf"));
+    hcd.setEncryptionKey(EncryptionUtil.wrapKey(conf,
+      conf.get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, User.getCurrent().getShortName()),
+      secondCFKey));
+    TEST_UTIL.getHBaseAdmin().modifyColumn(htd.getName(), hcd);
+    Thread.sleep(5000); // Need a predicate for online schema change
+
+    // And major compact
+    TEST_UTIL.getHBaseAdmin().majorCompact(htd.getName());
+    TEST_UTIL.waitFor(30000, 1000, true, new Predicate<Exception>() {
+      @Override
+      public boolean evaluate() throws Exception {
+        // When compaction has finished, all of the original files will be
+        // gone
+        boolean found = false;
+        for (Path path: initialPaths) {
+          found = TEST_UTIL.getTestFileSystem().exists(path);
+          if (found) {
+            LOG.info("Found " + path);
+            break;
+          }
+        }
+        return !found;
+      }
+    });
+
+    // Verify we have store file(s) with only the new key
+    List<Path> pathsAfterCompaction = findStorefilePaths(htd.getTableName());
+    assertTrue(pathsAfterCompaction.size() > 0);
+    for (Path path: pathsAfterCompaction) {
+      assertFalse("Store file " + path + " retains initial key",
+        Bytes.equals(initialCFKey.getEncoded(), extractHFileKey(path)));
+      assertTrue("Store file " + path + " has incorrect key",
+        Bytes.equals(secondCFKey.getEncoded(), extractHFileKey(path)));
+    }
+  }
+
+  @Test
+  public void testMasterKeyRotation() throws Exception {
+    // Create the table schema
+    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("default",
+      "testMasterKeyRotation"));
+    HColumnDescriptor hcd = new HColumnDescriptor("cf");
+    hcd.setEncryptionType("AES");
+    hcd.setEncryptionKey(EncryptionUtil.wrapKey(conf, "hbase", initialCFKey));
+    htd.addFamily(hcd);
+
+    // Create the table and some on disk files
+    createTableAndFlush(htd);
+
+    // Verify we have store file(s) with the initial key
+    List<Path> storeFilePaths = findStorefilePaths(htd.getTableName());
+    assertTrue(storeFilePaths.size() > 0);
+    for (Path path: storeFilePaths) {
+      assertTrue("Store file " + path + " has incorrect key",
+        Bytes.equals(initialCFKey.getEncoded(), extractHFileKey(path)));
+    }
+
+    // Now shut down the HBase cluster
+    TEST_UTIL.shutdownMiniHBaseCluster();
+
+    // "Rotate" the master key
+    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "other");
+    conf.set(HConstants.CRYPTO_MASTERKEY_ALTERNATE_NAME_CONF_KEY, "hbase");
+
+    // Start the cluster back up
+    TEST_UTIL.startMiniHBaseCluster(1, 1);
+    // Verify the table can still be loaded
+    TEST_UTIL.waitTableAvailable(htd.getName(), 5000);
+    // Double check that the store file keys can be unwrapped
+    storeFilePaths = findStorefilePaths(htd.getTableName());
+    assertTrue(storeFilePaths.size() > 0);
+    for (Path path: storeFilePaths) {
+      assertTrue("Store file " + path + " has incorrect key",
+        Bytes.equals(initialCFKey.getEncoded(), extractHFileKey(path)));
+    }
+  }
+
+  private static List<Path> findStorefilePaths(TableName tableName) throws Exception {
+    List<Path> paths = new ArrayList<Path>();
+    for (HRegion region:
+        TEST_UTIL.getRSForFirstRegionInTable(tableName).getOnlineRegions(tableName)) {
+      for (Store store: region.getStores().values()) {
+        for (StoreFile storefile: store.getStorefiles()) {
+          paths.add(storefile.getPath());
+        }
+      }
+    }
+    return paths;
+  }
+
+  private void createTableAndFlush(HTableDescriptor htd) throws Exception {
+    HColumnDescriptor hcd = htd.getFamilies().iterator().next();
+    // Create the test table
+    TEST_UTIL.getHBaseAdmin().createTable(htd);
+    TEST_UTIL.waitTableAvailable(htd.getName(), 5000);
+    // Create a store file
+    HTable table = new HTable(conf, htd.getName());
+    try {
+      table.put(new Put(Bytes.toBytes("testrow"))
+        .add(hcd.getName(), Bytes.toBytes("q"), Bytes.toBytes("value")));
+    } finally {
+      table.close();
+    }
+    TEST_UTIL.getHBaseAdmin().flush(htd.getName());
+  }
+
+  private static byte[] extractHFileKey(Path path) throws Exception {
+    HFile.Reader reader = HFile.createReader(TEST_UTIL.getTestFileSystem(), path,
+      new CacheConfig(conf), conf);
+    try {
+      reader.loadFileInfo();
+      Encryption.Context cryptoContext = reader.getFileContext().getEncryptionContext();
+      assertNotNull("Reader has a null crypto context", cryptoContext);
+      Key key = cryptoContext.getKey();
+      assertNotNull("Crypto context has no key", key);
+      return key.getEncoded();
+    } finally {
+      reader.close();
+    }
+  }
+
+}
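
Stripped of test scaffolding, the rotation flow exercised above is: re-wrap a new key into the column family schema, push the schema change, and major compact so store files are rewritten under the new key. A sketch (admin, htd, and newCFKey stand in for the test's objects):

    HColumnDescriptor hcd = htd.getFamily(Bytes.toBytes("cf"));
    hcd.setEncryptionKey(EncryptionUtil.wrapKey(conf, "hbase", newCFKey));
    admin.modifyColumn(htd.getName(), hcd);   // online schema update
    admin.majorCompact(htd.getName());        // rewrites store files with the new key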

Added: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java?rev=1545536&view=auto
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java (added)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java Tue Nov 26 04:22:33 2013
@@ -0,0 +1,129 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import static org.junit.Assert.*;
+
+import java.security.Key;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.io.crypto.Encryption;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.io.hfile.HFile;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(MediumTests.class)
+public class TestEncryptionRandomKeying {
+  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+  private static Configuration conf = TEST_UTIL.getConfiguration();
+  private static HTableDescriptor htd;
+
+  private static List<Path> findStorefilePaths(byte[] tableName) throws Exception {
+    List<Path> paths = new ArrayList<Path>();
+    for (HRegion region:
+        TEST_UTIL.getRSForFirstRegionInTable(tableName).getOnlineRegions(htd.getTableName())) {
+      for (Store store: region.getStores().values()) {
+        for (StoreFile storefile: store.getStorefiles()) {
+          paths.add(storefile.getPath());
+        }
+      }
+    }
+    return paths;
+  }
+
+  private static byte[] extractHFileKey(Path path) throws Exception {
+    HFile.Reader reader = HFile.createReader(TEST_UTIL.getTestFileSystem(), path,
+      new CacheConfig(conf), conf);
+    try {
+      reader.loadFileInfo();
+      Encryption.Context cryptoContext = reader.getFileContext().getEncryptionContext();
+      assertNotNull("Reader has a null crypto context", cryptoContext);
+      Key key = cryptoContext.getKey();
+      if (key == null) {
+        return null;
+      }
+      return key.getEncoded();
+    } finally {
+      reader.close();
+    }
+  }
+
+  @BeforeClass
+  public static void setUp() throws Exception {
+    conf.setInt("hfile.format.version", 3);
+    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
+
+    // Create the table schema
+    // Specify an encryption algorithm without a key
+    htd = new HTableDescriptor(TableName.valueOf("default", "TestEncryptionRandomKeying"));
+    HColumnDescriptor hcd = new HColumnDescriptor("cf");
+    hcd.setEncryptionType("AES");
+    htd.addFamily(hcd);
+
+    // Start the minicluster
+    TEST_UTIL.startMiniCluster(1);
+
+    // Create the test table
+    TEST_UTIL.getHBaseAdmin().createTable(htd);
+    TEST_UTIL.waitTableAvailable(htd.getName(), 5000);
+
+    // Create a store file
+    HTable table = new HTable(conf, htd.getName());
+    try {
+      table.put(new Put(Bytes.toBytes("testrow"))
+        .add(hcd.getName(), Bytes.toBytes("q"), Bytes.toBytes("value")));
+    } finally {
+      table.close();
+    }
+    TEST_UTIL.getHBaseAdmin().flush(htd.getName());
+  }
+
+  @AfterClass
+  public static void tearDown() throws Exception {
+    TEST_UTIL.shutdownMiniCluster();
+  }
+
+  @Test
+  public void testRandomKeying() throws Exception {
+    // Verify we have store file(s) with a random key
+    final List<Path> initialPaths = findStorefilePaths(htd.getName());
+    assertTrue(initialPaths.size() > 0);
+    for (Path path: initialPaths) {
+      assertNotNull("Store file " + path + " is not encrypted", extractHFileKey(path));
+    }
+  }
+
+}
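
The interesting property of this test is that no ENCRYPTION_KEY is ever set on the
family, so the HFile writer must fall back to a randomly generated per-file data key
wrapped with the cluster master key. A minimal sketch of such a schema declaration,
assuming an already-connected HBaseAdmin handle named 'admin' (hypothetical here):

    // Declare an encrypted family with an algorithm only; each flushed store
    // file should then carry its own randomly generated, wrapped data key.
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("t"));
    HColumnDescriptor hcd = new HColumnDescriptor("cf");
    hcd.setEncryptionType("AES");   // deliberately no setEncryptionKey() call
    htd.addFamily(hcd);
    admin.createTable(htd);         // 'admin' is a hypothetical HBaseAdmin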

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java Tue Nov 26 04:22:33 2013
@@ -192,7 +192,7 @@ public class TestStore extends TestCase 
     writer.close();
 
     // Verify that compression and encoding settings are respected
-    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf));
+    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), conf);
     assertEquals(hcd.getCompressionType(), reader.getCompressionAlgorithm());
     assertEquals(hcd.getDataBlockEncoding(), reader.getDataBlockEncoding());
     reader.close();
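
The only change in this hunk tracks an API update: HFile.createReader now takes the
caller's Configuration, presumably so the reader can resolve codec and encryption
context for v3 HFiles. A hedged before/after sketch, with handles as in the test:

    // Before: HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf));
    // After:  the Configuration travels with the reader explicitly.
    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), conf);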

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java Tue Nov 26 04:22:33 2013
@@ -458,7 +458,7 @@ public class TestStoreFile extends HBase
     }
     writer.close();
 
-    StoreFile.Reader reader = new StoreFile.Reader(fs, f, cacheConf);
+    StoreFile.Reader reader = new StoreFile.Reader(fs, f, cacheConf, conf);
     reader.loadFileInfo();
     reader.loadBloomfilter();
     StoreFileScanner scanner = reader.getStoreFileScanner(false, false);
@@ -539,7 +539,7 @@ public class TestStoreFile extends HBase
     }
     writer.close();
 
-    StoreFile.Reader reader = new StoreFile.Reader(fs, f, cacheConf);
+    StoreFile.Reader reader = new StoreFile.Reader(fs, f, cacheConf, conf);
     reader.loadFileInfo();
     reader.loadBloomfilter();
 
@@ -584,7 +584,7 @@ public class TestStoreFile extends HBase
     writeStoreFile(writer);
     writer.close();
 
-    StoreFile.Reader reader = new StoreFile.Reader(fs, f, cacheConf);
+    StoreFile.Reader reader = new StoreFile.Reader(fs, f, cacheConf, conf);
 
     // Now do reseek with empty KV to position to the beginning of the file
 
@@ -643,7 +643,7 @@ public class TestStoreFile extends HBase
       }
       writer.close();
 
-      StoreFile.Reader reader = new StoreFile.Reader(fs, f, cacheConf);
+      StoreFile.Reader reader = new StoreFile.Reader(fs, f, cacheConf, conf);
       reader.loadFileInfo();
       reader.loadBloomfilter();
       StoreFileScanner scanner = reader.getStoreFileScanner(false, false);
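
Same Configuration threading as in TestStore above, this time for StoreFile.Reader.
Usage is otherwise unchanged; a compact sketch (fs, f, and cacheConf as in the
surrounding test code):

    StoreFile.Reader reader = new StoreFile.Reader(fs, f, cacheConf, conf);
    reader.loadFileInfo();      // load trailer/file info before scanning
    reader.loadBloomfilter();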

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java Tue Nov 26 04:22:33 2013
@@ -25,7 +25,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Random;
 import java.util.UUID;
-import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -46,6 +45,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.wal.HLog.Entry;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -143,6 +143,8 @@ public final class HLogPerformanceEvalua
     boolean cleanup = true;
     boolean noclosefs = false;
     long roll = Long.MAX_VALUE;
+    boolean compress = false;
+    String cipher = null;
     // Process command line args
     for (int i = 0; i < args.length; i++) {
       String cmd = args[i];
@@ -173,6 +175,10 @@ public final class HLogPerformanceEvalua
           noclosefs = true;
         } else if (cmd.equals("-roll")) {
           roll = Long.parseLong(args[++i]);
+        } else if (cmd.equals("-compress")) {
+          compress = true;
+        } else if (cmd.equals("-encryption")) {
+          cipher = args[++i];
         } else if (cmd.equals("-h")) {
           printUsageAndExit();
         } else if (cmd.equals("--help")) {
@@ -186,6 +192,24 @@ public final class HLogPerformanceEvalua
       }
     }
 
+    if (compress) {
+      Configuration conf = getConf();
+      conf.setBoolean(HConstants.ENABLE_WAL_COMPRESSION, true);
+    }
+
+    if (cipher != null) {
+      // Set up HLog for encryption
+      Configuration conf = getConf();
+      conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+      conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
+      conf.setClass("hbase.regionserver.hlog.reader.impl", SecureProtobufLogReader.class,
+        HLog.Reader.class);
+      conf.setClass("hbase.regionserver.hlog.writer.impl", SecureProtobufLogWriter.class,
+        HLog.Writer.class);
+      conf.setBoolean(HConstants.ENABLE_WAL_ENCRYPTION, true);
+      conf.set(HConstants.CRYPTO_WAL_ALGORITHM_CONF_KEY, cipher);
+    }
+
     // Run HLog Performance Evaluation
     // First set the fs from configs.  In case we are on hadoop1
     FSUtils.setFsDefault(getConf(), FSUtils.getRootDir(getConf()));
@@ -322,6 +346,8 @@ public final class HLogPerformanceEvalua
     System.err.println("  -verify          Verify edits written in sequence");
     System.err.println("  -verbose         Output extra info; e.g. all edit seq ids when verifying");
     System.err.println("  -roll <N>        Roll the WAL every N appends");
+    System.err.println("  -compress        Compress the WAL via dictionary compression");
+    System.err.println("  -encryption <A>  Encrypt the WAL with algorithm A, e.g. AES");
     System.err.println("");
     System.err.println("Examples:");
     System.err.println("");

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java Tue Nov 26 04:22:33 2013
@@ -85,8 +85,8 @@ public class SequenceFileLogWriter exten
   @Override
   public void init(FileSystem fs, Path path, Configuration conf, boolean overwritable)
   throws IOException {
+    super.init(fs, path, conf, overwritable);
     boolean compress = initializeCompressionContext(conf, path);
-
     // Create a SF.Writer instance.
     try {
       // reflection for a version of SequenceFile.createWriter that doesn't
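
The added super.init() call is small but load-bearing: it presumably lets the base
writer retain the Configuration for subclasses that need it later, such as the secure
writer. Any custom HLog.Writer should follow the same pattern; a hedged sketch:

    @Override
    public void init(FileSystem fs, Path path, Configuration conf, boolean overwritable)
        throws IOException {
      super.init(fs, path, conf, overwritable); // let the base class keep conf
      // ... subclass-specific stream and codec setup follows ...
    }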

Added: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureHLog.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureHLog.java?rev=1545536&view=auto
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureHLog.java (added)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureHLog.java Tue Nov 26 04:22:33 2013
@@ -0,0 +1,128 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver.wal;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.util.List;
+import java.util.concurrent.atomic.AtomicLong;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.impl.Log4JLogger;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.log4j.Level;
+
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(MediumTests.class)
+public class TestSecureHLog {
+  static final Log LOG = LogFactory.getLog(TestSecureHLog.class);
+  static {
+    ((Log4JLogger)LogFactory.getLog("org.apache.hadoop.hbase.regionserver.wal"))
+      .getLogger().setLevel(Level.ALL);
+  }
+  static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    Configuration conf = TEST_UTIL.getConfiguration();
+    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
+    conf.setClass("hbase.regionserver.hlog.reader.impl", SecureProtobufLogReader.class,
+      HLog.Reader.class);
+    conf.setClass("hbase.regionserver.hlog.writer.impl", SecureProtobufLogWriter.class,
+      HLog.Writer.class);
+    conf.setBoolean(HConstants.ENABLE_WAL_ENCRYPTION, true);
+  }
+
+  @Test
+  public void testSecureHLog() throws Exception {
+    TableName tableName = TableName.valueOf("TestSecureHLog");
+    HTableDescriptor htd = new HTableDescriptor(tableName);
+    htd.addFamily(new HColumnDescriptor(tableName.getName()));
+    HRegionInfo regioninfo = new HRegionInfo(tableName,
+      HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW, false);
+    final int total = 10;
+    final byte[] row = Bytes.toBytes("row");
+    final byte[] family = Bytes.toBytes("family");
+    final byte[] value = Bytes.toBytes("Test value");
+    FileSystem fs = TEST_UTIL.getTestFileSystem();
+    Path logDir = TEST_UTIL.getDataTestDir("log");
+    final AtomicLong sequenceId = new AtomicLong(1);
+
+    // Write the WAL
+    HLog wal = new FSHLog(fs, TEST_UTIL.getDataTestDir(), logDir.toString(),
+      TEST_UTIL.getConfiguration());
+    for (int i = 0; i < total; i++) {
+      WALEdit kvs = new WALEdit();
+      kvs.add(new KeyValue(row, family, Bytes.toBytes(i), value));
+      wal.append(regioninfo, tableName, kvs, System.currentTimeMillis(), htd, sequenceId);
+    }
+    final Path walPath = ((FSHLog) wal).computeFilename();
+    wal.close();
+
+    // Ensure the edits are not in plaintext
+    long length = fs.getFileStatus(walPath).getLen();
+    FSDataInputStream in = fs.open(walPath);
+    byte[] fileData = new byte[(int)length];
+    IOUtils.readFully(in, fileData);
+    in.close();
+    assertFalse("Cells appear to be plaintext", Bytes.contains(fileData, value));
+
+    // Confirm the WAL can be read back
+    HLog.Reader reader = HLogFactory.createReader(TEST_UTIL.getTestFileSystem(), walPath,
+      TEST_UTIL.getConfiguration());
+    int count = 0;
+    HLog.Entry entry = new HLog.Entry();
+    while (reader.next(entry) != null) {
+      count++;
+      List<KeyValue> kvs = entry.getEdit().getKeyValues();
+      assertTrue("Should be one KV per WALEdit", kvs.size() == 1);
+      for (KeyValue kv: kvs) {
+        byte[] thisRow = kv.getRow();
+        assertTrue("Incorrect row", Bytes.equals(thisRow, row));
+        byte[] thisFamily = kv.getFamily();
+        assertTrue("Incorrect family", Bytes.equals(thisFamily, family));
+        byte[] thisValue = kv.getValue();
+        assertTrue("Incorrect value", Bytes.equals(thisValue, value));
+      }
+    }
+    assertEquals("Should have read back as many KVs as written", total, count);
+    reader.close();
+  }
+
+}
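
To reproduce this setup outside a unit test, the equivalent site configuration would
look roughly as follows. The property names are the string values behind the
HConstants keys used above, quoted from memory; verify them against your HBase
version, and substitute a real key provider for the test one:

    hbase.crypto.keyprovider            = org.apache.hadoop.hbase.io.crypto.KeyStoreKeyProvider
    hbase.crypto.master.key.name        = <master key alias>
    hbase.regionserver.hlog.reader.impl = org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogReader
    hbase.regionserver.hlog.writer.impl = org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogWriter
    hbase.regionserver.wal.encryption   = true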

Added: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureWALReplay.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureWALReplay.java?rev=1545536&view=auto
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureWALReplay.java (added)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureWALReplay.java Tue Nov 26 04:22:33 2013
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver.wal;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+
+import org.junit.BeforeClass;
+import org.junit.experimental.categories.Category;
+
+@Category(MediumTests.class)
+public class TestSecureWALReplay extends TestWALReplay {
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    Configuration conf = TestWALReplay.TEST_UTIL.getConfiguration();
+    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
+    conf.setClass("hbase.regionserver.hlog.reader.impl", SecureProtobufLogReader.class,
+      HLog.Reader.class);
+    conf.setClass("hbase.regionserver.hlog.writer.impl", SecureProtobufLogWriter.class,
+      HLog.Writer.class);
+    conf.setBoolean(HConstants.ENABLE_WAL_ENCRYPTION, true);
+    TestWALReplay.setUpBeforeClass();
+  }
+
+}
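
Subclassing an entire suite and flipping configuration in @BeforeClass is a compact
way to re-run every replay scenario against the encrypted WAL. The same trick would
work for other WAL variants; a sketch with an invented class name (imports as in
TestSecureWALReplay above):

    @Category(MediumTests.class)
    public class TestCompressedWALReplay extends TestWALReplay { // hypothetical
      @BeforeClass
      public static void setUpBeforeClass() throws Exception {
        Configuration conf = TestWALReplay.TEST_UTIL.getConfiguration();
        conf.setBoolean(HConstants.ENABLE_WAL_COMPRESSION, true); // flip one knob
        TestWALReplay.setUpBeforeClass(); // then run the whole parent suite
      }
    }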

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java Tue Nov 26 04:22:33 2013
@@ -18,12 +18,15 @@ package org.apache.hadoop.hbase.util;
 
 import java.io.IOException;
 import java.io.InterruptedIOException;
+import java.security.SecureRandom;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Random;
 import java.util.concurrent.atomic.AtomicReference;
 
+import javax.crypto.spec.SecretKeySpec;
+
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -36,8 +39,12 @@ import org.apache.hadoop.hbase.Performan
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.crypto.Cipher;
+import org.apache.hadoop.hbase.io.crypto.Encryption;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.regionserver.BloomType;
+import org.apache.hadoop.hbase.security.EncryptionUtil;
+import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -123,6 +130,11 @@ public class LoadTestTool extends Abstra
   protected static final String OPT_BATCHUPDATE = "batchupdate";
   protected static final String OPT_UPDATE = "update";
 
+  protected static final String OPT_ENCRYPTION = "encryption";
+  protected static final String OPT_ENCRYPTION_USAGE =
+    "Enables transparent encryption on the test table; the cipher must be one of " +
+    Arrays.toString(Encryption.getSupportedCiphers());
+
   protected static final long DEFAULT_START_KEY = 0;
 
   /** This will be removed as we factor out the dependency on command line */
@@ -168,6 +180,8 @@ public class LoadTestTool extends Abstra
   protected boolean isSkipInit = false;
   protected boolean isInitOnly = false;
 
+  protected Cipher cipher = null;
+
   protected String[] splitColonSeparated(String option,
       int minNumCols, int maxNumCols) {
     String optVal = cmd.getOptionValue(option);
@@ -213,6 +227,14 @@ public class LoadTestTool extends Abstra
       if (inMemoryCF) {
         columnDesc.setInMemory(inMemoryCF);
       }
+      if (cipher != null) {
+        byte[] keyBytes = new byte[cipher.getKeyLength()];
+        new SecureRandom().nextBytes(keyBytes);
+        columnDesc.setEncryptionType(cipher.getName());
+        columnDesc.setEncryptionKey(EncryptionUtil.wrapKey(conf,
+          User.getCurrent().getShortName(),
+          new SecretKeySpec(keyBytes, cipher.getName())));
+      }
       if (isNewCf) {
         admin.addColumn(tableName, columnDesc);
       } else {
@@ -261,6 +283,8 @@ public class LoadTestTool extends Abstra
       "A positive integer number. When a number n is speicfied, load test "
           + "tool  will load n table parallely. -tn parameter value becomes "
           + "table name prefix. Each table name is in format <tn>_1...<tn>_n");
+
+    addOptWithArg(OPT_ENCRYPTION, OPT_ENCRYPTION_USAGE);
   }
 
   @Override
@@ -396,6 +420,10 @@ public class LoadTestTool extends Abstra
           + maxNumTags);
     }
 
+    if (cmd.hasOption(OPT_ENCRYPTION)) {
+      cipher = Encryption.getCipher(conf, cmd.getOptionValue(OPT_ENCRYPTION));
+    }
+
   }
 
   public void initTestTable() throws IOException {
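
With the option wired into column family creation, an encrypted load test run might
look like the following (flag spellings other than -encryption are taken from the
tool's usual usage and worth double-checking):

    $ hbase org.apache.hadoop.hbase.util.LoadTestTool -tn enctest \
        -encryption AES -write 3:1024 -num_keys 100000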

Added: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java?rev=1545536&view=auto
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java (added)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java Tue Nov 26 04:22:33 2013
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.security.Key;
+import java.security.SecureRandom;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.crypto.spec.SecretKeySpec;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.LargeTests;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.io.crypto.Encryption;
+import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.aes.AES;
+import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.io.hfile.HFile;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.Store;
+import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.security.EncryptionUtil;
+import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
+import org.apache.hadoop.hbase.util.hbck.HbckTestingUtil;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(LargeTests.class)
+public class TestHBaseFsckEncryption {
+
+  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+
+  private Configuration conf;
+  private HTableDescriptor htd;
+  private Key cfKey;
+
+  @Before
+  public void setUp() throws Exception {
+    conf = TEST_UTIL.getConfiguration();
+    conf.setInt("hfile.format.version", 3);
+    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
+
+    // Create the test encryption key
+    SecureRandom rng = new SecureRandom();
+    byte[] keyBytes = new byte[AES.KEY_LENGTH];
+    rng.nextBytes(keyBytes);
+    cfKey = new SecretKeySpec(keyBytes, "AES");
+
+    // Start the minicluster
+    TEST_UTIL.startMiniCluster(3);
+
+    // Create the table
+    htd = new HTableDescriptor(TableName.valueOf("default", "TestHBaseFsckEncryption"));
+    HColumnDescriptor hcd = new HColumnDescriptor("cf");
+    hcd.setEncryptionType("AES");
+    hcd.setEncryptionKey(EncryptionUtil.wrapKey(conf,
+      conf.get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, User.getCurrent().getShortName()),
+      cfKey));
+    htd.addFamily(hcd);
+    TEST_UTIL.getHBaseAdmin().createTable(htd);
+    TEST_UTIL.waitTableAvailable(htd.getName(), 5000);
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    TEST_UTIL.shutdownMiniCluster();
+  }
+
+  @Test
+  public void testFsckWithEncryption() throws Exception {
+    // Populate the table with some data
+    HTable table = new HTable(conf, htd.getName());
+    try {
+      byte[] values = { 'A', 'B', 'C', 'D' };
+      for (int i = 0; i < values.length; i++) {
+        for (int j = 0; j < values.length; j++) {
+          Put put = new Put(new byte[] { values[i], values[j] });
+          put.add(Bytes.toBytes("cf"), new byte[] {}, new byte[] { values[i],
+            values[j] });
+          table.put(put);
+        }
+      }
+    } finally {
+      table.close();
+    }
+    // Flush it
+    TEST_UTIL.getHBaseAdmin().flush(htd.getName());
+
+    // Verify we have encrypted store files on disk
+    final List<Path> paths = findStorefilePaths(htd.getName());
+    assertTrue(paths.size() > 0);
+    for (Path path: paths) {
+      assertTrue("Store file " + path + " has incorrect key",
+        Bytes.equals(cfKey.getEncoded(), extractHFileKey(path)));
+    }
+
+    // Ensure HBaseFsck doesn't consider the encrypted files corrupt
+    HBaseFsck res = HbckTestingUtil.doHFileQuarantine(conf, htd.getTableName());
+    assertEquals(0, res.getRetCode());
+    HFileCorruptionChecker hfcc = res.getHFilecorruptionChecker();
+    assertEquals(0, hfcc.getCorrupted().size());
+    assertEquals(0, hfcc.getFailures().size());
+    assertEquals(0, hfcc.getQuarantined().size());
+    assertEquals(0, hfcc.getMissing().size());
+  }
+
+  private List<Path> findStorefilePaths(byte[] tableName) throws Exception {
+    List<Path> paths = new ArrayList<Path>();
+    for (HRegion region:
+        TEST_UTIL.getRSForFirstRegionInTable(tableName).getOnlineRegions(htd.getTableName())) {
+      for (Store store: region.getStores().values()) {
+        for (StoreFile storefile: store.getStorefiles()) {
+          paths.add(storefile.getPath());
+        }
+      }
+    }
+    return paths;
+  }
+
+  private byte[] extractHFileKey(Path path) throws Exception {
+    HFile.Reader reader = HFile.createReader(TEST_UTIL.getTestFileSystem(), path,
+      new CacheConfig(conf), conf);
+    try {
+      reader.loadFileInfo();
+      Encryption.Context cryptoContext = reader.getFileContext().getEncryptionContext();
+      assertNotNull("Reader has a null crypto context", cryptoContext);
+      Key key = cryptoContext.getKey();
+      assertNotNull("Crypto context has no key", key);
+      return key.getEncoded();
+    } finally {
+      reader.close();
+    }
+  }
+
+}
\ No newline at end of file
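
The quarantine pass driven through HbckTestingUtil corresponds to hbck's HFile
corruption checker; the standalone equivalent is roughly the following (flag name
from memory, confirm with the tool's help output):

    $ hbase hbck -checkCorruptHFiles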

Modified: hbase/trunk/hbase-shell/src/main/ruby/hbase/admin.rb
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-shell/src/main/ruby/hbase/admin.rb?rev=1545536&r1=1545535&r2=1545536&view=diff
==============================================================================
--- hbase/trunk/hbase-shell/src/main/ruby/hbase/admin.rb (original)
+++ hbase/trunk/hbase-shell/src/main/ruby/hbase/admin.rb Tue Nov 26 04:22:33 2013
@@ -635,6 +635,16 @@ module Hbase
           family.setCompressionType(org.apache.hadoop.hbase.io.compress.Compression::Algorithm.valueOf(compression))
         end
       end
+      if arg.include?(org.apache.hadoop.hbase.HColumnDescriptor::ENCRYPTION)
+        algorithm = arg.delete(org.apache.hadoop.hbase.HColumnDescriptor::ENCRYPTION).upcase
+        family.setEncryptionType(algorithm)
+        if arg.include?(org.apache.hadoop.hbase.HColumnDescriptor::ENCRYPTION_KEY)
+          key = org.apache.hadoop.hbase.io.crypto.Encryption.hash128(
+            arg.delete(org.apache.hadoop.hbase.HColumnDescriptor::ENCRYPTION_KEY))
+          family.setEncryptionKey(org.apache.hadoop.hbase.security.EncryptionUtil.wrapKey(@conf, key,
+            algorithm))
+        end
+      end
 
       set_user_metadata(family, arg.delete(METADATA)) if arg[METADATA]
       set_descriptor_config(family, arg.delete(CONFIGURATION)) if arg[CONFIGURATION]
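
From the shell, the new family attributes are used like this (a sketch; per the code
above the ENCRYPTION_KEY string is hashed to a 128-bit key with Encryption.hash128
before being wrapped, and omitting ENCRYPTION_KEY falls back to random per-file data
keys as exercised by TestEncryptionRandomKeying):

    hbase> create 'enctable', {NAME => 'cf', ENCRYPTION => 'AES', ENCRYPTION_KEY => 'secret passphrase'}
    hbase> alter 'enctable', {NAME => 'cf', ENCRYPTION => 'AES'}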