Posted to commits@hbase.apache.org by st...@apache.org on 2015/05/04 23:43:08 UTC

hbase git commit: HBASE-13464 Remove deprecations for 2.0.0 - Part 1 (Lars Francke)

Repository: hbase
Updated Branches:
  refs/heads/master 0dfb36472 -> 2ee5f8f5f


HBASE-13464 Remove deprecations for 2.0.0 - Part 1 (Lars Francke)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2ee5f8f5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2ee5f8f5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2ee5f8f5

Branch: refs/heads/master
Commit: 2ee5f8f5fd834be20273829962e694dbd50ccc97
Parents: 0dfb364
Author: stack <st...@apache.org>
Authored: Mon May 4 14:42:58 2015 -0700
Committer: stack <st...@apache.org>
Committed: Mon May 4 14:42:58 2015 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hbase/ClusterStatus.java  |  26 --
 .../apache/hadoop/hbase/HColumnDescriptor.java  | 301 ++-----------------
 .../org/apache/hadoop/hbase/HRegionInfo.java    |  56 +---
 .../apache/hadoop/hbase/HTableDescriptor.java   |   7 +-
 .../hadoop/hbase/client/ClientScanner.java      |  13 +-
 .../org/apache/hadoop/hbase/client/HTable.java  |  15 -
 .../hadoop/hbase/client/HTableInterface.java    |  13 +-
 .../client/UnmodifyableHTableDescriptor.java    |   7 +-
 .../hadoop/hbase/protobuf/ProtobufUtil.java     |   2 +-
 .../hbase/mapreduce/LoadIncrementalHFiles.java  |  14 -
 .../hadoop/hbase/mapreduce/TableSplit.java      |  28 +-
 .../TestHColumnDescriptorDefaultVersions.java   |   8 +-
 .../apache/hadoop/hbase/client/TestAdmin1.java  |  10 -
 .../hadoop/hbase/client/TestFromClientSide.java |   4 +-
 .../coprocessor/SampleRegionWALObserver.java    |   2 +-
 .../hbase/mapreduce/TestImportExport.java       |   9 +-
 .../master/TestDistributedLogSplitting.java     |   2 +-
 .../regionserver/TestRegionSplitPolicy.java     |   4 +-
 .../wal/TestWALActionsListener.java             |   2 +-
 .../TestReplicationSourceManager.java           |   2 +-
 .../apache/hadoop/hbase/wal/TestWALFactory.java |   6 +-
 21 files changed, 59 insertions(+), 472 deletions(-)
----------------------------------------------------------------------
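
For callers that still compiled against the removed deprecated members, the replacement calls used elsewhere in this patch (single-argument descriptor constructors, KeepDeletedCells.TRUE, HRegionInfo.getTable()) imply migrations along these lines. The sketch below is illustrative only and not part of the commit; the class name DeprecationMigrationSketch is hypothetical, and the calls assume the HBase client API as it appears in the hunks that follow.

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HRegionInfo;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.KeepDeletedCells;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.util.Bytes;

    public class DeprecationMigrationSketch {
      public static void main(String[] args) {
        TableName tableName = TableName.valueOf("example");

        // Was: new HColumnDescriptor(family, maxVersions, compression, inMemory,
        //        blockCacheEnabled, ttl, bloomFilter) -- removed by this patch.
        // Now: one-argument constructor plus chained setters.
        HColumnDescriptor hcd = new HColumnDescriptor(Bytes.toBytes("f"))
            .setMaxVersions(5)
            .setKeepDeletedCells(KeepDeletedCells.TRUE); // boolean overload removed

        // Was: new HTableDescriptor() and mutate -- the no-arg constructor is no
        //      longer public; pass the table name up front instead.
        HTableDescriptor htd = new HTableDescriptor(tableName);
        htd.addFamily(hcd);

        // Was: byte[] tn = info.getTableName() -- removed; getTable() returns a
        //      TableName, so convert explicitly where raw bytes are still needed.
        HRegionInfo info = new HRegionInfo(tableName);
        byte[] tableNameBytes = info.getTable().toBytes();
        System.out.println(Bytes.toString(tableNameBytes));
      }
    }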


http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
index 52b34a3..edfaa6f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
@@ -85,18 +85,6 @@ public class ClusterStatus extends VersionedWritable {
   private String[] masterCoprocessors;
   private Boolean balancerOn;
 
-  /**
-   * Constructor, for Writable
-   * @deprecated As of release 0.96
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-6038">HBASE-6038</a>).
-   *             This will be removed in HBase 2.0.0.
-   *             Used by Writables and Writables are going away.
-   */
-  @Deprecated
-  public ClusterStatus() {
-    super();
-  }
-
   public ClusterStatus(final String hbaseVersion, final String clusterid,
       final Map<ServerName, ServerLoad> servers,
       final Collection<ServerName> deadServers,
@@ -220,20 +208,6 @@ public class ClusterStatus extends VersionedWritable {
   // Getters
   //
 
-  /**
-   * Returns detailed region server information: A list of
-   * {@link ServerName}.
-   * @return region server information
-   * @deprecated As of release 0.92
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-1502">HBASE-1502</a>).
-   *             This will be removed in HBase 2.0.0.
-   *             Use {@link #getServers()}.
-   */
-  @Deprecated
-  public Collection<ServerName> getServerInfo() {
-    return getServers();
-  }
-
   public Collection<ServerName> getServers() {
     if (liveServers == null) {
       return Collections.<ServerName>emptyList();

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index f4ac197..6cbe804 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -285,16 +285,9 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
   private int cachedMaxVersions = UNINITIALIZED;
 
   /**
-   * Default constructor.
-   * @deprecated As of release 0.96
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-5453">HBASE-5453</a>).
-   *             This will be made private in HBase 2.0.0.
-   *             Used by Writables and Writables are going away.
-   */
-  @Deprecated
-  // Make this private rather than remove after deprecation period elapses.  Its needed by pb
-  // deserializations.
-  public HColumnDescriptor() {
+   * Default constructor. Must be present for PB deserializations.
+   */
+  private HColumnDescriptor() {
     this.name = null;
   }
 
@@ -317,10 +310,20 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
    * letter -- and may not contain a <code>:</code>
    */
   public HColumnDescriptor(final byte [] familyName) {
-    this (familyName == null || familyName.length <= 0?
-      HConstants.EMPTY_BYTE_ARRAY: familyName, DEFAULT_VERSIONS,
-      DEFAULT_COMPRESSION, DEFAULT_IN_MEMORY, DEFAULT_BLOCKCACHE,
-      DEFAULT_TTL, DEFAULT_BLOOMFILTER);
+    isLegalFamilyName(familyName);
+    this.name = familyName;
+
+    setMaxVersions(DEFAULT_VERSIONS);
+    setMinVersions(DEFAULT_MIN_VERSIONS);
+    setKeepDeletedCells(DEFAULT_KEEP_DELETED);
+    setInMemory(DEFAULT_IN_MEMORY);
+    setBlockCacheEnabled(DEFAULT_BLOCKCACHE);
+    setTimeToLive(DEFAULT_TTL);
+    setCompressionType(Compression.Algorithm.valueOf(DEFAULT_COMPRESSION.toUpperCase()));
+    setDataBlockEncoding(DataBlockEncoding.valueOf(DEFAULT_DATA_BLOCK_ENCODING.toUpperCase()));
+    setBloomFilterType(BloomType.valueOf(DEFAULT_BLOOMFILTER.toUpperCase()));
+    setBlocksize(DEFAULT_BLOCKSIZE);
+    setScope(DEFAULT_REPLICATION_SCOPE);
   }
 
   /**
@@ -343,148 +346,6 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
   }
 
   /**
-   * Constructor
-   * @param familyName Column family name. Must be 'printable' -- digit or
-   * letter -- and may not contain a <code>:</code>
-   * @param maxVersions Maximum number of versions to keep
-   * @param compression Compression type
-   * @param inMemory If true, column data should be kept in an HRegionServer's
-   * cache
-   * @param blockCacheEnabled If true, MapFile blocks should be cached
-   * @param timeToLive Time-to-live of cell contents, in seconds
-   * (use HConstants.FOREVER for unlimited TTL)
-   * @param bloomFilter Bloom filter type for this column
-   *
-   * @throws IllegalArgumentException if passed a family name that is made of
-   * other than 'word' characters: i.e. <code>[a-zA-Z_0-9]</code> or contains
-   * a <code>:</code>
-   * @throws IllegalArgumentException if the number of versions is &lt;= 0
-   * @deprecated As of release 0.96
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-">HBASE-</a>).
-   *             This will be removed in HBase 2.0.0.
-   *             Use {@link #HColumnDescriptor(String)} and setters.
-   */
-  @Deprecated
-  public HColumnDescriptor(final byte [] familyName, final int maxVersions,
-      final String compression, final boolean inMemory,
-      final boolean blockCacheEnabled,
-      final int timeToLive, final String bloomFilter) {
-    this(familyName, maxVersions, compression, inMemory, blockCacheEnabled,
-      DEFAULT_BLOCKSIZE, timeToLive, bloomFilter, DEFAULT_REPLICATION_SCOPE);
-  }
-
-  /**
-   * Constructor
-   * @param familyName Column family name. Must be 'printable' -- digit or
-   * letter -- and may not contain a <code>:</code>
-   * @param maxVersions Maximum number of versions to keep
-   * @param compression Compression type
-   * @param inMemory If true, column data should be kept in an HRegionServer's
-   * cache
-   * @param blockCacheEnabled If true, MapFile blocks should be cached
-   * @param blocksize Block size to use when writing out storefiles.  Use
-   * smaller block sizes for faster random-access at expense of larger indices
-   * (more memory consumption).  Default is usually 64k.
-   * @param timeToLive Time-to-live of cell contents, in seconds
-   * (use HConstants.FOREVER for unlimited TTL)
-   * @param bloomFilter Bloom filter type for this column
-   * @param scope The scope tag for this column
-   *
-   * @throws IllegalArgumentException if passed a family name that is made of
-   * other than 'word' characters: i.e. <code>[a-zA-Z_0-9]</code> or contains
-   * a <code>:</code>
-   * @throws IllegalArgumentException if the number of versions is &lt;= 0
-   * @deprecated As of release 0.96
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-">HBASE-</a>).
-   *             This will be removed in HBase 2.0.0.
-   *             Use {@link #HColumnDescriptor(String)} and setters.
-   */
-  @Deprecated
-  public HColumnDescriptor(final byte [] familyName, final int maxVersions,
-      final String compression, final boolean inMemory,
-      final boolean blockCacheEnabled, final int blocksize,
-      final int timeToLive, final String bloomFilter, final int scope) {
-    this(familyName, DEFAULT_MIN_VERSIONS, maxVersions, DEFAULT_KEEP_DELETED,
-        compression, DEFAULT_ENCODE_ON_DISK, DEFAULT_DATA_BLOCK_ENCODING,
-        inMemory, blockCacheEnabled, blocksize, timeToLive, bloomFilter,
-        scope);
-  }
-
-  /**
-   * Constructor
-   * @param familyName Column family name. Must be 'printable' -- digit or
-   * letter -- and may not contain a <code>:</code>
-   * @param minVersions Minimum number of versions to keep
-   * @param maxVersions Maximum number of versions to keep
-   * @param keepDeletedCells Whether to retain deleted cells until they expire
-   *        up to maxVersions versions.
-   * @param compression Compression type
-   * @param encodeOnDisk whether to use the specified data block encoding
-   *        on disk. If false, the encoding will be used in cache only.
-   * @param dataBlockEncoding data block encoding
-   * @param inMemory If true, column data should be kept in an HRegionServer's
-   * cache
-   * @param blockCacheEnabled If true, MapFile blocks should be cached
-   * @param blocksize Block size to use when writing out storefiles.  Use
-   * smaller blocksizes for faster random-access at expense of larger indices
-   * (more memory consumption).  Default is usually 64k.
-   * @param timeToLive Time-to-live of cell contents, in seconds
-   * (use HConstants.FOREVER for unlimited TTL)
-   * @param bloomFilter Bloom filter type for this column
-   * @param scope The scope tag for this column
-   *
-   * @throws IllegalArgumentException if passed a family name that is made of
-   * other than 'word' characters: i.e. <code>[a-zA-Z_0-9]</code> or contains
-   * a <code>:</code>
-   * @throws IllegalArgumentException if the number of versions is &lt;= 0
-   * @deprecated As of release 0.96
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-">HBASE-</a>).
-   *             This will be removed in HBase 2.0.0.
-   *             Use {@link #HColumnDescriptor(String)} and setters.
-   */
-  @Deprecated
-  public HColumnDescriptor(final byte[] familyName, final int minVersions,
-      final int maxVersions, final KeepDeletedCells keepDeletedCells,
-      final String compression, final boolean encodeOnDisk,
-      final String dataBlockEncoding, final boolean inMemory,
-      final boolean blockCacheEnabled, final int blocksize,
-      final int timeToLive, final String bloomFilter, final int scope) {
-    isLegalFamilyName(familyName);
-    this.name = familyName;
-
-    if (maxVersions <= 0) {
-      // TODO: Allow maxVersion of 0 to be the way you say "Keep all versions".
-      // Until there is support, consider 0 or < 0 -- a configuration error.
-      throw new IllegalArgumentException("Maximum versions must be positive");
-    }
-
-    if (minVersions > 0) {
-      if (timeToLive == HConstants.FOREVER) {
-        throw new IllegalArgumentException("Minimum versions requires TTL.");
-      }
-      if (minVersions >= maxVersions) {
-        throw new IllegalArgumentException("Minimum versions must be < "
-            + "maximum versions.");
-      }
-    }
-
-    setMaxVersions(maxVersions);
-    setMinVersions(minVersions);
-    setKeepDeletedCells(keepDeletedCells);
-    setInMemory(inMemory);
-    setBlockCacheEnabled(blockCacheEnabled);
-    setTimeToLive(timeToLive);
-    setCompressionType(Compression.Algorithm.
-      valueOf(compression.toUpperCase()));
-    setDataBlockEncoding(DataBlockEncoding.
-        valueOf(dataBlockEncoding.toUpperCase()));
-    setBloomFilterType(BloomType.
-      valueOf(bloomFilter.toUpperCase()));
-    setBlocksize(blocksize);
-    setScope(scope);
-  }
-
-  /**
    * @param b Family name.
    * @return <code>b</code>
    * @throws IllegalArgumentException If not null and not a legitimate family
@@ -684,30 +545,6 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
   }
 
   /**
-   * @return data block encoding algorithm used on disk
-   * @deprecated As of release 0.98
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-9870">HBASE-9870</a>).
-   *             This will be removed in HBase 2.0.0. See {@link #getDataBlockEncoding()}}
-   */
-  @Deprecated
-  public DataBlockEncoding getDataBlockEncodingOnDisk() {
-    return getDataBlockEncoding();
-  }
-
-  /**
-   * This method does nothing now. Flag ENCODE_ON_DISK is not used
-   * any more. Data blocks have the same encoding in cache as on disk.
-   * @return this (for chained invocation)
-   * @deprecated As of release 0.98
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-9870">HBASE-9870</a>).
-   *             This will be removed in HBase 2.0.0. This method does nothing now.
-   */
-  @Deprecated
-  public HColumnDescriptor setEncodeOnDisk(boolean encodeOnDisk) {
-    return this;
-  }
-
-  /**
    * @return the data block encoding algorithm used in block cache and
    *         optionally on disk
    */
@@ -748,23 +585,6 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
   /**
    * @return Whether KV tags should be compressed along with DataBlockEncoding. When no
    *         DataBlockEncoding is been used, this is having no effect.
-   * @deprecated As of release 1.0.0
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-10870">HBASE-10870</a>).
-   *             This will be removed in HBase 2.0.0. Use {@link #isCompressTags()} instead.
-   */
-  @Deprecated
-  public boolean shouldCompressTags() {
-    String compressTagsStr = getValue(COMPRESS_TAGS);
-    boolean compressTags = DEFAULT_COMPRESS_TAGS;
-    if (compressTagsStr != null) {
-      compressTags = Boolean.parseBoolean(compressTagsStr);
-    }
-    return compressTags;
-  }
-
-  /**
-   * @return Whether KV tags should be compressed along with DataBlockEncoding. When no
-   *         DataBlockEncoding is been used, this is having no effect.
    */
   public boolean isCompressTags() {
     String compressTagsStr = getValue(COMPRESS_TAGS);
@@ -829,21 +649,6 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
    * @param keepDeletedCells True if deleted rows should not be collected
    * immediately.
    * @return this (for chained invocation)
-   * @deprecated As of release 1.0.0
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-12363">HBASE-12363</a>).
-   *             This will be removed in HBase 2.0.0.
-   *             Use {@link #setKeepDeletedCells(KeepDeletedCells)}.
-   */
-  @Deprecated
-  public HColumnDescriptor setKeepDeletedCells(boolean keepDeletedCells) {
-    return setValue(KEEP_DELETED_CELLS, (keepDeletedCells ? KeepDeletedCells.TRUE
-        : KeepDeletedCells.FALSE).toString());
-  }
-
-  /**
-   * @param keepDeletedCells True if deleted rows should not be collected
-   * immediately.
-   * @return this (for chained invocation)
    */
   public HColumnDescriptor setKeepDeletedCells(KeepDeletedCells keepDeletedCells) {
     return setValue(KEEP_DELETED_CELLS, keepDeletedCells.toString());
@@ -943,17 +748,6 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
 
   /**
    * @return true if we should cache data blocks on write
-   * @deprecated As of release 1.0.0
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-10870">HBASE-10870</a>).
-   *             This will be removed in HBase 2.0.0. Use {@link #isCacheDataOnWrite()}} instead.
-   */
-  @Deprecated
-  public boolean shouldCacheDataOnWrite() {
-    return setAndGetBoolean(CACHE_DATA_ON_WRITE, DEFAULT_CACHE_DATA_ON_WRITE);
-  }
-
-  /**
-   * @return true if we should cache data blocks on write
    */
   public boolean isCacheDataOnWrite() {
     return setAndGetBoolean(CACHE_DATA_ON_WRITE, DEFAULT_CACHE_DATA_ON_WRITE);
@@ -968,18 +762,6 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
   }
 
   /**
-   * @return true if we should cache data blocks in the L1 cache (if block cache deploy
-   * has more than one tier; e.g. we are using CombinedBlockCache).
-   * @deprecated As of release 1.0.0
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-10870">HBASE-10870</a>).
-   *             This will be removed in HBase 2.0.0. Use {@link #isCacheDataInL1()}} instead.
-   */
-  @Deprecated
-  public boolean shouldCacheDataInL1() {
-    return setAndGetBoolean(CACHE_DATA_IN_L1, DEFAULT_CACHE_DATA_IN_L1);
-  }
-
-  /**
    * @return true if we should cache data blocks in the L1 cache (if block cache deploy has more
    *         than one tier; e.g. we are using CombinedBlockCache).
    */
@@ -1006,18 +788,6 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
 
   /**
    * @return true if we should cache index blocks on write
-   * @deprecated As of release 1.0.0
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-10870">HBASE-10870</a>).
-   *             This will be removed in HBase 2.0.0.
-   *             Use {@link #isCacheIndexesOnWrite()} instead.
-   */
-  @Deprecated
-  public boolean shouldCacheIndexesOnWrite() {
-    return setAndGetBoolean(CACHE_INDEX_ON_WRITE, DEFAULT_CACHE_INDEX_ON_WRITE);
-  }
-
-  /**
-   * @return true if we should cache index blocks on write
    */
   public boolean isCacheIndexesOnWrite() {
     return setAndGetBoolean(CACHE_INDEX_ON_WRITE, DEFAULT_CACHE_INDEX_ON_WRITE);
@@ -1033,18 +803,6 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
 
   /**
    * @return true if we should cache bloomfilter blocks on write
-   * @deprecated As of release 1.0.0
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-10870">HBASE-10870</a>).
-   *             This will be removed in HBase 2.0.0.
-   *             Use {@link #isCacheBloomsOnWrite()}} instead.
-   */
-  @Deprecated
-  public boolean shouldCacheBloomsOnWrite() {
-    return setAndGetBoolean(CACHE_BLOOMS_ON_WRITE, DEFAULT_CACHE_BLOOMS_ON_WRITE);
-  }
-
-  /**
-   * @return true if we should cache bloomfilter blocks on write
    */
   public boolean isCacheBloomsOnWrite() {
     return setAndGetBoolean(CACHE_BLOOMS_ON_WRITE, DEFAULT_CACHE_BLOOMS_ON_WRITE);
@@ -1059,19 +817,6 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
   }
 
   /**
-   * @return true if we should evict cached blocks from the blockcache on
-   * close
-   * @deprecated As of release 1.0.0
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-10870">HBASE-10870</a>).
-   *             This will be removed in HBase 2.0.0.
-   *             Use {@link #isEvictBlocksOnClose()}} instead.
-   */
-  @Deprecated
-  public boolean shouldEvictBlocksOnClose() {
-    return setAndGetBoolean(EVICT_BLOCKS_ON_CLOSE, DEFAULT_EVICT_BLOCKS_ON_CLOSE);
-  }
-
-  /**
    * @return true if we should evict cached blocks from the blockcache on close
    */
   public boolean isEvictBlocksOnClose() {
@@ -1089,18 +834,6 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
 
   /**
    * @return true if we should prefetch blocks into the blockcache on open
-   * @deprecated As of release 1.0.0
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-10870">HBASE-10870</a>).
-   *             This will be removed in HBase 2.0.0.
-   *             Use {@link #isPrefetchBlocksOnOpen()}}} instead.
-   */
-  @Deprecated
-  public boolean shouldPrefetchBlocksOnOpen() {
-    return setAndGetBoolean(PREFETCH_BLOCKS_ON_OPEN, DEFAULT_PREFETCH_BLOCKS_ON_OPEN);
-  }
-
-  /**
-   * @return true if we should prefetch blocks into the blockcache on open
    */
   public boolean isPrefetchBlocksOnOpen() {
     return setAndGetBoolean(PREFETCH_BLOCKS_ON_OPEN, DEFAULT_PREFETCH_BLOCKS_ON_OPEN);

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
index 7b754f4..ad85d5e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
@@ -233,17 +233,6 @@ public class HRegionInfo implements Comparable<HRegionInfo> {
     setHashCode();
   }
 
-  /** Default constructor - creates empty object
-   * @deprecated As of release 0.96
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-5453">HBASE-5453</a>).
-   *             This will be removed in HBase 2.0.0.
-   *             Used by Writables and Writables are going away.
-   */
-  @Deprecated
-  public HRegionInfo() {
-    super();
-  }
-
   public HRegionInfo(final TableName tableName) {
     this(tableName, null, null);
   }
@@ -487,14 +476,10 @@ public class HRegionInfo implements Comparable<HRegionInfo> {
 
   /**
    * Gets the table name from the specified region name.
-   * @param regionName
-   * @return Table name.
-   * @deprecated As of release 0.96
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-9508">HBASE-9508</a>).
-   *             This will be removed in HBase 2.0.0. Use {@link #getTable(byte[])}.
+   * @param regionName to extract the table name from
+   * @return Table name
    */
-  @Deprecated
-  public static byte [] getTableName(byte[] regionName) {
+  public static TableName getTable(final byte [] regionName) {
     int offset = -1;
     for (int i = 0; i < regionName.length; i++) {
       if (regionName[i] == HConstants.DELIMITER) {
@@ -504,19 +489,7 @@ public class HRegionInfo implements Comparable<HRegionInfo> {
     }
     byte[] buff  = new byte[offset];
     System.arraycopy(regionName, 0, buff, 0, offset);
-    return buff;
-  }
-
-
-  /**
-   * Gets the table name from the specified region name.
-   * Like {@link #getTableName(byte[])} only returns a {@link TableName} rather than a byte array.
-   * @param regionName
-   * @return Table name
-   * @see #getTableName(byte[])
-   */
-  public static TableName getTable(final byte [] regionName) {
-    return TableName.valueOf(getTableName(regionName));
+    return TableName.valueOf(buff);
   }
 
   /**
@@ -657,24 +630,11 @@ public class HRegionInfo implements Comparable<HRegionInfo> {
 
   /**
    * Get current table name of the region
-   * @return byte array of table name
-   * @deprecated As of release 0.96
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-9508">HBASE-9508</a>).
-   *             This will be removed in HBase 2.0.0. Use {@link #getTable()}.
-   */
-  @Deprecated
-  public byte [] getTableName() {
-    return getTable().toBytes();
-  }
-
-  /**
-   * Get current table name of the region
    * @return TableName
-   * @see #getTableName()
    */
   public TableName getTable() {
     // This method name should be getTableName but there was already a method getTableName
-    // that returned a byte array.  It is unfortunate given everwhere else, getTableName returns
+    // that returned a byte array.  It is unfortunate given everywhere else, getTableName returns
     // a TableName instance.
     if (tableName == null || tableName.getName().length == 0) {
       tableName = getTable(getRegionName());
@@ -1052,7 +1012,7 @@ public class HRegionInfo implements Comparable<HRegionInfo> {
   }
 
   /**
-   * Get the end key for display. Optionally hide the real end key. 
+   * Get the end key for display. Optionally hide the real end key.
    * @param hri
    * @param conf
    * @return the endkey
@@ -1064,7 +1024,7 @@ public class HRegionInfo implements Comparable<HRegionInfo> {
   }
 
   /**
-   * Get the start key for display. Optionally hide the real start key. 
+   * Get the start key for display. Optionally hide the real start key.
    * @param hri
    * @param conf
    * @return the startkey
@@ -1123,7 +1083,7 @@ public class HRegionInfo implements Comparable<HRegionInfo> {
         modifiedName[lengthSoFar - 1] = ENC_SEPARATOR;
         System.arraycopy(encodedRegionName, 0, modifiedName, lengthSoFar,
             encodedRegionName.length);
-        lengthSoFar += encodedRegionName.length; 
+        lengthSoFar += encodedRegionName.length;
         modifiedName[lengthSoFar] = ENC_SEPARATOR;
         return modifiedName;
       } catch (IOException e) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index 24af4ab..a0a3c79 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -294,13 +294,12 @@ public class HTableDescriptor implements Comparable<HTableDescriptor> {
   /**
    * Default constructor which constructs an empty object.
    * For deserializing an HTableDescriptor instance only.
-   * @deprecated As of release 0.96
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-5453">HBASE-5453</a>).
-   *             This will be removed in HBase 2.0.0.
+   * @deprecated As of release 0.96 (<a href="https://issues.apache.org/jira/browse/HBASE-5453">HBASE-5453</a>).
+   *             This was made protected in 2.0.0 and will be removed in HBase 3.0.0.
    *             Used by Writables and Writables are going away.
    */
   @Deprecated
-  public HTableDescriptor() {
+  protected HTableDescriptor() {
     super();
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
index 48bb2f0..1ea8769 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
@@ -163,17 +163,6 @@ public class ClientScanner extends AbstractClientScanner {
       return this.connection;
     }
 
-    /**
-     * @return Table name
-     * @deprecated As of release 0.96
-     *             (<a href="https://issues.apache.org/jira/browse/HBASE-9508">HBASE-9508</a>).
-     *             This will be removed in HBase 2.0.0. Use {@link #getTable()}.
-     */
-    @Deprecated
-    protected byte [] getTableName() {
-      return this.tableName.getName();
-    }
-
     protected TableName getTable() {
       return this.tableName;
     }
@@ -341,7 +330,7 @@ public class ClientScanner extends AbstractClientScanner {
      *
      * By default, scan metrics are disabled; if the application wants to collect them, this
      * behavior can be turned on by calling calling {@link Scan#setScanMetricsEnabled(boolean)}
-     * 
+     *
      * <p>This invocation clears the scan metrics. Metrics are aggregated in the Scan instance.
      */
     protected void writeScanMetrics() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
index e0c2977..324fe61 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
@@ -973,21 +973,6 @@ public class HTable implements HTableInterface {
   }
 
   /**
-   * @deprecated As of release 0.96
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-9508">HBASE-9508</a>).
-   *             This will be removed in HBase 2.0.0.
-   *             Use {@link #incrementColumnValue(byte[], byte[], byte[], long, Durability)}.
-   */
-  @Deprecated
-  @Override
-  public long incrementColumnValue(final byte [] row, final byte [] family,
-      final byte [] qualifier, final long amount, final boolean writeToWAL)
-  throws IOException {
-    return incrementColumnValue(row, family, qualifier, amount,
-      writeToWAL? Durability.SKIP_WAL: Durability.USE_DEFAULT);
-  }
-
-  /**
    * {@inheritDoc}
    */
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
index 5823f69..8436307 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
@@ -46,17 +46,6 @@ public interface HTableInterface extends Table {
   byte[] getTableName();
 
   /**
-   * @deprecated As of release 0.96
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-9508">HBASE-9508</a>).
-   *             This will be removed in HBase 2.0.0.
-   *             Use {@link #incrementColumnValue(byte[], byte[], byte[], long, Durability)}.
-   */
-  @Deprecated
-  long incrementColumnValue(final byte [] row, final byte [] family,
-      final byte [] qualifier, final long amount, final boolean writeToWAL)
-  throws IOException;
-
-  /**
    * @deprecated Use {@link #existsAll(java.util.List)}  instead.
    */
   @Deprecated
@@ -121,7 +110,7 @@ public interface HTableInterface extends Table {
    */
   @Deprecated
   void setAutoFlushTo(boolean autoFlush);
-  
+
   /**
    * Tells whether or not 'auto-flush' is turned on.
    *

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
index e1eb755..7331983 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
@@ -30,7 +30,12 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class UnmodifyableHTableDescriptor extends HTableDescriptor {
-  /** Default constructor */
+  /**
+   * Default constructor.
+   * @deprecated  As of release 2.0.0. This will be removed in HBase 3.0.0.
+   *              Use {@link #UnmodifyableHTableDescriptor(HTableDescriptor)}.
+   */
+  @Deprecated
   public UnmodifyableHTableDescriptor() {
     super();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 8b5b2d7..cda6bd7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -2589,7 +2589,7 @@ public final class ProtobufUtil {
     // input / output paths are relative to the store dir
     // store dir is relative to region dir
     CompactionDescriptor.Builder builder = CompactionDescriptor.newBuilder()
-        .setTableName(ByteStringer.wrap(info.getTableName()))
+        .setTableName(ByteStringer.wrap(info.getTable().toBytes()))
         .setEncodedRegionName(ByteStringer.wrap(info.getEncodedNameAsBytes()))
         .setFamilyName(ByteStringer.wrap(family))
         .setStoreHomeDir(storeDir.getName()); //make relative

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
index 324a3cf..d9829a7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
@@ -47,7 +47,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
-import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.RegionServerCallable;
@@ -670,19 +669,6 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
   }
 
   /**
-   * @deprecated As of release 0.96
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-9508">HBASE-9508</a>).
-   *             This will be removed in HBase 2.0.0.
-   *             Use {@link #tryAtomicRegionLoad(Connection, TableName, byte[], Collection)}.
-   */
-  @Deprecated
-  protected List<LoadQueueItem> tryAtomicRegionLoad(final HConnection conn,
-      final byte [] tableName, final byte[] first, Collection<LoadQueueItem> lqis)
-  throws IOException {
-    return tryAtomicRegionLoad(conn, TableName.valueOf(tableName), first, lqis);
-  }
-
-  /**
    * Attempts to do an atomic load of many hfiles into a region.  If it fails,
    * it returns a list of hfiles that need to be retried.  If it is successful
    * it will return an empty list.

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
index 7111668..f2f5a67 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
@@ -46,7 +46,7 @@ implements Writable, Comparable<TableSplit> {
   /** @deprecated LOG variable would be made private. */
   @Deprecated
   public static final Log LOG = LogFactory.getLog(TableSplit.class);
-  
+
   // should be < 0 (@see #readFields(DataInput))
   // version 1 supports Scan data member
   enum Version {
@@ -77,7 +77,7 @@ implements Writable, Comparable<TableSplit> {
       return byCode[code * -1];
     }
   }
-  
+
   private static final Version VERSION = Version.INITIAL;
   private TableName tableName;
   private byte [] startRow;
@@ -93,18 +93,6 @@ implements Writable, Comparable<TableSplit> {
   }
 
   /**
-   * @deprecated As of release 0.96
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-9508">HBASE-9508</a>).
-   *             This will be removed in HBase 2.0.0.
-   *             Use {@link TableSplit#TableSplit(TableName, byte[], byte[], String)}.
-   */
-  @Deprecated
-  public TableSplit(final byte [] tableName, Scan scan, byte [] startRow, byte [] endRow,
-      final String location) {
-    this(TableName.valueOf(tableName), scan, startRow, endRow, location);
-  }
-
-  /**
    * Creates a new instance while assigning all variables.
    * Length of region is set to 0
    *
@@ -144,18 +132,6 @@ implements Writable, Comparable<TableSplit> {
   }
 
   /**
-   * @deprecated As of release 0.96
-   *             (<a href="https://issues.apache.org/jira/browse/HBASE-9508">HBASE-9508</a>).
-   *             This will be removed in HBase 2.0.0.
-   *             Use {@link TableSplit#TableSplit(TableName, byte[], byte[], String)}.
-   */
-  @Deprecated
-  public TableSplit(final byte [] tableName, byte[] startRow, byte[] endRow,
-      final String location) {
-    this(TableName.valueOf(tableName), startRow, endRow, location);
-  }
-
-  /**
    * Creates a new instance without a scanner.
    *
    * @param tableName The name of the current table.

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptorDefaultVersions.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptorDefaultVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptorDefaultVersions.java
index 4fa945a..94df71f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptorDefaultVersions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptorDefaultVersions.java
@@ -120,10 +120,8 @@ public class TestHColumnDescriptorDefaultVersions {
     Admin admin = TEST_UTIL.getHBaseAdmin();
     // Create a table with one family
     HTableDescriptor baseHtd = new HTableDescriptor(TABLE_NAME);
-    HColumnDescriptor hcd =
-        new HColumnDescriptor(FAMILY, 5, HColumnDescriptor.DEFAULT_COMPRESSION,
-            HColumnDescriptor.DEFAULT_IN_MEMORY, HColumnDescriptor.DEFAULT_BLOCKCACHE,
-            HColumnDescriptor.DEFAULT_TTL, HColumnDescriptor.DEFAULT_BLOOMFILTER);
+    HColumnDescriptor hcd = new HColumnDescriptor(FAMILY);
+    hcd.setMaxVersions(5);
     baseHtd.addFamily(hcd);
     admin.createTable(baseHtd);
     admin.disableTable(TABLE_NAME);
@@ -161,4 +159,4 @@ public class TestHColumnDescriptorDefaultVersions {
     }
   }
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
index 62e8a34..7917dfd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
@@ -1272,16 +1272,6 @@ public class TestAdmin1 {
     }
   }
 
-  /**
-   * HADOOP-2156
-   * @throws IOException
-   */
-  @SuppressWarnings("deprecation")
-  @Test (expected=IllegalArgumentException.class, timeout=300000)
-  public void testEmptyHTableDescriptor() throws IOException {
-    this.admin.createTable(new HTableDescriptor());
-  }
-
   @Test (expected=IllegalArgumentException.class, timeout=300000)
   public void testInvalidHColumnDescriptor() throws IOException {
      new HColumnDescriptor("/cfamily/name");

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index edf2293..3dc1387 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -58,6 +58,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeepDeletedCells;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.RegionLocations;
@@ -182,7 +183,8 @@ public class TestFromClientSide {
      final byte[] T2 = Bytes.toBytes("T2");
      final byte[] T3 = Bytes.toBytes("T3");
      HColumnDescriptor hcd = new HColumnDescriptor(FAMILY)
-         .setKeepDeletedCells(true).setMaxVersions(3);
+         .setKeepDeletedCells(KeepDeletedCells.TRUE)
+         .setMaxVersions(3);
 
      HTableDescriptor desc = new HTableDescriptor(TABLENAME);
      desc.addFamily(hcd);

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java
index d7852f1..dd8c7b4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java
@@ -109,7 +109,7 @@ implements WALObserver {
       HRegionInfo info, WALKey logKey, WALEdit logEdit) throws IOException {
     boolean bypass = false;
     // check table name matches or not.
-    if (!Bytes.equals(info.getTableName(), this.tableName)) {
+    if (!Bytes.equals(info.getTable().toBytes(), this.tableName)) {
       return bypass;
     }
     preWALWriteCalled = true;

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
index 4e3baad..baabc0b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeepDeletedCells;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Delete;
@@ -281,7 +282,7 @@ public class TestImportExport {
     HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(EXPORT_TABLE));
     desc.addFamily(new HColumnDescriptor(FAMILYA)
         .setMaxVersions(5)
-        .setKeepDeletedCells(true)
+        .setKeepDeletedCells(KeepDeletedCells.TRUE)
     );
     UTIL.getHBaseAdmin().createTable(desc);
     Table t = UTIL.getConnection().getTable(desc.getTableName());
@@ -312,7 +313,7 @@ public class TestImportExport {
     desc = new HTableDescriptor(TableName.valueOf(IMPORT_TABLE));
     desc.addFamily(new HColumnDescriptor(FAMILYA)
         .setMaxVersions(5)
-        .setKeepDeletedCells(true)
+        .setKeepDeletedCells(KeepDeletedCells.TRUE)
     );
     UTIL.getHBaseAdmin().createTable(desc);
     t.close();
@@ -347,7 +348,7 @@ public class TestImportExport {
     HTableDescriptor desc = new HTableDescriptor(EXPORT_TABLE);
     desc.addFamily(new HColumnDescriptor(FAMILYA)
         .setMaxVersions(5)
-        .setKeepDeletedCells(true)
+        .setKeepDeletedCells(KeepDeletedCells.TRUE)
     );
     UTIL.getHBaseAdmin().createTable(desc);
 
@@ -383,7 +384,7 @@ public class TestImportExport {
     desc = new HTableDescriptor(TableName.valueOf(IMPORT_TABLE));
     desc.addFamily(new HColumnDescriptor(FAMILYA)
         .setMaxVersions(5)
-        .setKeepDeletedCells(true)
+        .setKeepDeletedCells(KeepDeletedCells.TRUE)
     );
     UTIL.getHBaseAdmin().createTable(desc);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
index 28e7934..e4a9f71 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
@@ -1259,7 +1259,7 @@ public class TestDistributedLogSplitting {
     byte[] family = Bytes.toBytes("family");
     byte[] qualifier = Bytes.toBytes("c1");
     long timeStamp = System.currentTimeMillis();
-    HTableDescriptor htd = new HTableDescriptor();
+    HTableDescriptor htd = new HTableDescriptor(tableName);
     htd.addFamily(new HColumnDescriptor(family));
     final WAL wal = hrs.getWAL(curRegionInfo);
     for (int i = 0; i < NUM_LOG_LINES; i += 1) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionSplitPolicy.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionSplitPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionSplitPolicy.java
index dd7c61a..3ceb8fe 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionSplitPolicy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionSplitPolicy.java
@@ -150,7 +150,7 @@ public class TestRegionSplitPolicy {
    */
   @Test
   public void testCustomPolicy() throws IOException {
-    HTableDescriptor myHtd = new HTableDescriptor();
+    HTableDescriptor myHtd = new HTableDescriptor(TableName.valueOf("foobar"));
     myHtd.setValue(HTableDescriptor.SPLIT_POLICY,
         KeyPrefixRegionSplitPolicy.class.getName());
     myHtd.setValue(KeyPrefixRegionSplitPolicy.PREFIX_LENGTH_KEY, String.valueOf(2));
@@ -259,7 +259,7 @@ public class TestRegionSplitPolicy {
 
   @Test
   public void testDelimitedKeyPrefixRegionSplitPolicy() throws IOException {
-    HTableDescriptor myHtd = new HTableDescriptor();
+    HTableDescriptor myHtd = new HTableDescriptor(TableName.valueOf("foobar"));
     myHtd.setValue(HTableDescriptor.SPLIT_POLICY,
         DelimitedKeyPrefixRegionSplitPolicy.class.getName());
     myHtd.setValue(DelimitedKeyPrefixRegionSplitPolicy.DELIMITER_KEY, ",");

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java
index 2c00b33..69482d1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java
@@ -98,7 +98,7 @@ public class TestWALActionsListener {
       KeyValue kv = new KeyValue(b,b,b);
       WALEdit edit = new WALEdit();
       edit.add(kv);
-      HTableDescriptor htd = new HTableDescriptor();
+      HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(SOME_BYTES));
       htd.addFamily(new HColumnDescriptor(b));
 
       final long txid = wal.append(htd, hri, new WALKey(hri.getEncodedNameAsBytes(),

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java
index 854d4c0..9315f62 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java
@@ -202,7 +202,7 @@ public class TestReplicationSourceManager {
     final WAL wal = wals.getWAL(hri.getEncodedNameAsBytes());
     final AtomicLong sequenceId = new AtomicLong(1);
     manager.init();
-    HTableDescriptor htd = new HTableDescriptor();
+    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("tableame"));
     htd.addFamily(new HColumnDescriptor(f1));
     // Testing normal log rolling every 20
     for(long i = 1; i < 101; i++) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/2ee5f8f5/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
index fdc8e45..968c5c7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
@@ -245,7 +245,7 @@ public class TestWALFactory {
     try {
       HRegionInfo info = new HRegionInfo(tableName,
                   null,null, false);
-      HTableDescriptor htd = new HTableDescriptor();
+      HTableDescriptor htd = new HTableDescriptor(tableName);
       htd.addFamily(new HColumnDescriptor(tableName.getName()));
       final WAL wal = wals.getWAL(info.getEncodedNameAsBytes());
 
@@ -366,7 +366,7 @@ public class TestWALFactory {
     final AtomicLong sequenceId = new AtomicLong(1);
     final int total = 20;
 
-    HTableDescriptor htd = new HTableDescriptor();
+    HTableDescriptor htd = new HTableDescriptor(tableName);
     htd.addFamily(new HColumnDescriptor(tableName.getName()));
 
     for (int i = 0; i < total; i++) {
@@ -599,7 +599,7 @@ public class TestWALFactory {
     final DumbWALActionsListener visitor = new DumbWALActionsListener();
     final AtomicLong sequenceId = new AtomicLong(1);
     long timestamp = System.currentTimeMillis();
-    HTableDescriptor htd = new HTableDescriptor();
+    HTableDescriptor htd = new HTableDescriptor(tableName);
     htd.addFamily(new HColumnDescriptor("column"));
 
     HRegionInfo hri = new HRegionInfo(tableName,