Posted to commits@hbase.apache.org by en...@apache.org on 2014/08/20 03:45:51 UTC

[1/3] HBASE-11512 Write region open/close events to WAL

Repository: hbase
Updated Branches:
  refs/heads/master aeecd2037 -> d44e7df5d


http://git-wip-us.apache.org/repos/asf/hbase/blob/d44e7df5/hbase-protocol/src/main/protobuf/WAL.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/WAL.proto b/hbase-protocol/src/main/protobuf/WAL.proto
index 1c88b26..dae92d2 100644
--- a/hbase-protocol/src/main/protobuf/WAL.proto
+++ b/hbase-protocol/src/main/protobuf/WAL.proto
@@ -121,6 +121,29 @@ message FlushDescriptor {
 }
 
 /**
+ * Special WAL entry to hold everything related to a region event (open/close).
+ */
+message RegionEventDescriptor {
+  enum EventType {
+    REGION_OPEN = 0;
+    REGION_CLOSE = 1;
+  }
+
+  message StoreDescriptor {
+    required bytes family_name = 1;
+    required string store_home_dir = 2; //relative to region dir
+    repeated string store_file = 3; // relative to store dir
+  }
+
+  required EventType event_type = 1;
+  required bytes table_name = 2;
+  required bytes encoded_region_name = 3;
+  optional uint64 log_sequence_number = 4;
+  repeated StoreDescriptor stores = 5;
+  optional ServerName server = 6;  // Server who opened the region
+}
+
+/**
  * A trailer that is appended to the end of a properly closed HLog WAL file.
  * If missing, this is either a legacy or a corrupted WAL file.
  */
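
For reference, the new message assembles with the ordinary generated
protobuf builders; a minimal sketch with illustrative field values (the
optional server field is omitted):

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor;

RegionEventDescriptor desc = RegionEventDescriptor.newBuilder()
    .setEventType(RegionEventDescriptor.EventType.REGION_OPEN)
    .setTableName(ByteString.copyFromUtf8("t1"))               // illustrative
    .setEncodedRegionName(ByteString.copyFromUtf8("deadbeef")) // illustrative
    .setLogSequenceNumber(42L)
    .addStores(StoreDescriptor.newBuilder()
        .setFamilyName(ByteString.copyFromUtf8("f1"))
        .setStoreHomeDir("f1")            // relative to the region dir
        .addStoreFile("0123456789abcdef") // relative to the store dir
        .build())
    .build();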

http://git-wip-us.apache.org/repos/asf/hbase/blob/d44e7df5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 6f74b4d..274c1b3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -120,6 +120,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServic
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor;
+import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction;
 import org.apache.hadoop.hbase.regionserver.MultiVersionConsistencyControl.WriteEntry;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
@@ -725,6 +726,8 @@ public class HRegion implements HeapSize { // , Writable{
     status.setStatus("Writing region info on filesystem");
     fs.checkRegionInfoOnFilesystem();
 
+
+
     // Initialize all the HStores
     status.setStatus("Initializing all the Stores");
     long maxSeqId = initializeRegionStores(reporter, status);
@@ -761,6 +764,7 @@ public class HRegion implements HeapSize { // , Writable{
       // overlaps used sequence numbers
       nextSeqid += this.flushPerChanges + 10000000; // add another extra 10million
     }
+
     LOG.info("Onlined " + this.getRegionInfo().getShortNameToLog() +
       "; next sequenceid=" + nextSeqid);
 
@@ -850,6 +854,44 @@ public class HRegion implements HeapSize { // , Writable{
     return maxSeqId;
   }
 
+  private void writeRegionOpenMarker(HLog log, long openSeqId) throws IOException {
+    Map<byte[], List<Path>> storeFiles =
+        new TreeMap<byte[], List<Path>>(Bytes.BYTES_COMPARATOR);
+    for (Map.Entry<byte[], Store> entry : getStores().entrySet()) {
+      Store store = entry.getValue();
+      ArrayList<Path> storeFileNames = new ArrayList<Path>();
+      for (StoreFile storeFile: store.getStorefiles()) {
+        storeFileNames.add(storeFile.getPath());
+      }
+      storeFiles.put(entry.getKey(), storeFileNames);
+    }
+
+    RegionEventDescriptor regionOpenDesc = ProtobufUtil.toRegionEventDescriptor(
+      RegionEventDescriptor.EventType.REGION_OPEN, getRegionInfo(), openSeqId,
+      getRegionServerServices().getServerName(), storeFiles);
+    HLogUtil.writeRegionEventMarker(log, getTableDesc(), getRegionInfo(), regionOpenDesc,
+      getSequenceId());
+  }
+
+  private void writeRegionCloseMarker(HLog log) throws IOException {
+    Map<byte[], List<Path>> storeFiles =
+        new TreeMap<byte[], List<Path>>(Bytes.BYTES_COMPARATOR);
+    for (Map.Entry<byte[], Store> entry : getStores().entrySet()) {
+      Store store = entry.getValue();
+      ArrayList<Path> storeFileNames = new ArrayList<Path>();
+      for (StoreFile storeFile: store.getStorefiles()) {
+        storeFileNames.add(storeFile.getPath());
+      }
+      storeFiles.put(entry.getKey(), storeFileNames);
+    }
+
+    RegionEventDescriptor regionEventDesc = ProtobufUtil.toRegionEventDescriptor(
+      RegionEventDescriptor.EventType.REGION_CLOSE, getRegionInfo(), getSequenceId().get(),
+      getRegionServerServices().getServerName(), storeFiles);
+    HLogUtil.writeRegionEventMarker(log, getTableDesc(), getRegionInfo(), regionEventDesc,
+      getSequenceId());
+  }
+
   /**
    * @return True if this region has references.
    */
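
Both markers build the same family-to-store-file map before handing off to
HLogUtil; the shared shape, sketched as a hypothetical helper (getStoreFileMap
is illustrative, not part of this patch):

private Map<byte[], List<Path>> getStoreFileMap() {
  Map<byte[], List<Path>> storeFiles =
      new TreeMap<byte[], List<Path>>(Bytes.BYTES_COMPARATOR);
  for (Map.Entry<byte[], Store> entry : getStores().entrySet()) {
    List<Path> storeFileNames = new ArrayList<Path>();
    for (StoreFile storeFile : entry.getValue().getStorefiles()) {
      storeFileNames.add(storeFile.getPath()); // one path per store file
    }
    storeFiles.put(entry.getKey(), storeFileNames); // keyed by family name
  }
  return storeFiles;
}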
@@ -1227,6 +1269,12 @@ public class HRegion implements HeapSize { // , Writable{
           storeCloserThreadPool.shutdownNow();
         }
       }
+
+      status.setStatus("Writing region close event to WAL");
+      if (!abort && log != null && getRegionServerServices() != null) {
+        writeRegionCloseMarker(log);
+      }
+
       this.closed.set(true);
       if (memstoreSize.get() != 0) LOG.error("Memstore size is " + memstoreSize.get());
       if (coprocessorHost != null) {
@@ -3550,6 +3598,7 @@ public class HRegion implements HeapSize { // , Writable{
     }
     return storeFileNames;
   }
+
   //////////////////////////////////////////////////////////////////////////////
   // Support code
   //////////////////////////////////////////////////////////////////////////////
@@ -4601,6 +4650,9 @@ public class HRegion implements HeapSize { // , Writable{
 
     this.openSeqNum = initialize(reporter);
     this.setSequenceId(openSeqNum);
+    if (log != null && getRegionServerServices() != null) {
+      writeRegionOpenMarker(log, openSeqNum);
+    }
     return this;
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/d44e7df5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java
index 2c4652b..ce353b6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor;
+import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor;
 import org.apache.hadoop.hbase.util.FSUtils;
 
 import com.google.protobuf.TextFormat;
@@ -284,4 +285,20 @@ public class HLogUtil {
     }
     return trx;
   }
+
+  /**
+   * Write a region event marker (region open/close) to the WAL
+   */
+  public static long writeRegionEventMarker(HLog log, HTableDescriptor htd, HRegionInfo info,
+      final RegionEventDescriptor r, AtomicLong sequenceId) throws IOException {
+    TableName tn = TableName.valueOf(r.getTableName().toByteArray());
+    HLogKey key = new HLogKey(info.getEncodedNameAsBytes(), tn);
+    long trx = log.appendNoSync(htd, info, key, WALEdit.createRegionEventWALEdit(info, r),
+      sequenceId, false, null);
+    log.sync(trx);
+    if (LOG.isTraceEnabled()) {
+      LOG.trace("Appended region event marker " + TextFormat.shortDebugString(r));
+    }
+    return trx;
+  }
 }
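
As with the existing compaction and flush markers, the append is a two-step
pattern: appendNoSync returns a transaction id and sync(txid) blocks until
that transaction is durable. The calling pattern, sketched with the names
used above:

long txid = log.appendNoSync(htd, info, key,
    WALEdit.createRegionEventWALEdit(info, r), sequenceId, false, null);
log.sync(txid); // the marker is durable in the WAL once sync returns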

http://git-wip-us.apache.org/repos/asf/hbase/blob/d44e7df5/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java
index f684d7d..4b38027 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.codec.Codec;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor;
+import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -86,6 +87,7 @@ public class WALEdit implements Writable, HeapSize {
   static final byte [] METAROW = Bytes.toBytes("METAROW");
   static final byte[] COMPACTION = Bytes.toBytes("HBASE::COMPACTION");
   static final byte [] FLUSH = Bytes.toBytes("HBASE::FLUSH");
+  static final byte [] REGION_EVENT = Bytes.toBytes("HBASE::REGION_EVENT");
 
   private final int VERSION_2 = -1;
   private final boolean isReplay;
@@ -277,6 +279,20 @@ public class WALEdit implements Writable, HeapSize {
     return null;
   }
 
+  public static WALEdit createRegionEventWALEdit(HRegionInfo hri,
+      RegionEventDescriptor regionEventDesc) {
+    KeyValue kv = new KeyValue(getRowForRegion(hri), METAFAMILY, REGION_EVENT,
+      EnvironmentEdgeManager.currentTimeMillis(), regionEventDesc.toByteArray());
+    return new WALEdit().add(kv);
+  }
+
+  public static RegionEventDescriptor getRegionEventDescriptor(Cell cell) throws IOException {
+    if (CellUtil.matchingColumn(cell, METAFAMILY, REGION_EVENT)) {
+      return RegionEventDescriptor.parseFrom(cell.getValue());
+    }
+    return null;
+  }
+
   /**
   * Create a compaction WALEdit
    * @param c
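
Together the two helpers round-trip a descriptor through a WALEdit; a minimal
sketch (hri and regionEventDesc assumed in scope):

WALEdit edit = WALEdit.createRegionEventWALEdit(hri, regionEventDesc);
RegionEventDescriptor parsed =
    WALEdit.getRegionEventDescriptor(edit.getKeyValues().get(0));
// parsed is null for any cell that is not an HBASE::REGION_EVENT marker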

http://git-wip-us.apache.org/repos/asf/hbase/blob/d44e7df5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
index 5735eaa..289b630 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
@@ -116,7 +116,7 @@ public class TestDistributedLogSplitting {
     Logger.getLogger("org.apache.hadoop.hbase").setLevel(Level.DEBUG);
 
     // test ThreeRSAbort fails under hadoop2 (2.0.2-alpha) if shortcircuit-read (scr) is on. this
-    // turns it off for this test.  TODO: Figure out why scr breaks recovery. 
+    // turns it off for this test.  TODO: Figure out why scr breaks recovery.
     System.setProperty("hbase.tests.use.shortcircuit.reads", "false");
 
   }
@@ -176,7 +176,7 @@ public class TestDistributedLogSplitting {
     // refresh configuration
     conf = HBaseConfiguration.create(originalConf);
   }
-  
+
   @After
   public void after() throws Exception {
     try {
@@ -191,7 +191,7 @@ public class TestDistributedLogSplitting {
       ZKUtil.deleteNodeRecursively(TEST_UTIL.getZooKeeperWatcher(), "/hbase");
     }
   }
-  
+
   @Test (timeout=300000)
   public void testRecoveredEdits() throws Exception {
     LOG.info("testRecoveredEdits");
@@ -482,7 +482,7 @@ public class TestDistributedLogSplitting {
     ht.close();
     zkw.close();
   }
-  
+
   @Test(timeout = 300000)
   public void testMasterStartsUpWithLogReplayWork() throws Exception {
     LOG.info("testMasterStartsUpWithLogReplayWork");
@@ -704,7 +704,7 @@ public class TestDistributedLogSplitting {
 
     this.prepareData(ht, Bytes.toBytes("family"), Bytes.toBytes("c1"));
     String originalCheckSum = TEST_UTIL.checksumRows(ht);
-    
+
     // abort RS and trigger replay
     abortRSAndWaitForRecovery(hrs, zkw, NUM_REGIONS_TO_CREATE);
 
@@ -777,10 +777,10 @@ public class TestDistributedLogSplitting {
     }
     makeHLog(hrs.getWAL(), regions, "disableTable", "family", NUM_LOG_LINES, 100, false);
     makeHLog(hrs.getWAL(), regions, "table", "family", NUM_LOG_LINES, 100);
-    
+
     LOG.info("Disabling table\n");
     TEST_UTIL.getHBaseAdmin().disableTable(TableName.valueOf("disableTable"));
-    
+
     // abort RS
     LOG.info("Aborting region server: " + hrs.getServerName());
     hrs.abort("testing");
@@ -837,7 +837,7 @@ public class TestDistributedLogSplitting {
     assertEquals(NUM_LOG_LINES, count);
     LOG.info("Verify replayed edits");
     assertEquals(NUM_LOG_LINES, TEST_UTIL.countRows(ht));
-    
+
     // clean up
     for (HRegionInfo hri : regions) {
       Path editsdir =
@@ -879,7 +879,7 @@ public class TestDistributedLogSplitting {
       dstRS = rsts.get((i+1) % NUM_RS).getRegionServer();
       break;
     }
-    
+
     slm.markRegionsRecoveringInZK(hrs.getServerName(), regionSet);
     // move region in order for the region opened in recovering state
     final HRegionInfo hri = region;
@@ -896,7 +896,7 @@ public class TestDistributedLogSplitting {
         return (sn != null && sn.equals(tmpRS.getServerName()));
       }
     });
-    
+
     try {
       byte[] key = region.getStartKey();
       if (key == null || key.length == 0) {
@@ -955,6 +955,7 @@ public class TestDistributedLogSplitting {
       "table", "family", NUM_LOG_LINES, 100);
 
     new Thread() {
+      @Override
       public void run() {
         waitForCounter(tot_wkr_task_acquired, 0, 1, 1000);
         for (RegionServerThread rst : rsts) {
@@ -1145,7 +1146,7 @@ public class TestDistributedLogSplitting {
     assertTrue(isMetaRegionInRecovery);
 
     master.getMasterFileSystem().splitMetaLog(hrs.getServerName());
-    
+
     isMetaRegionInRecovery = false;
     recoveringRegions =
         zkw.getRecoverableZooKeeper().getChildren(zkw.recoveringRegionsZNode, false);
@@ -1317,7 +1318,7 @@ public class TestDistributedLogSplitting {
       WALEdit e = new WALEdit();
       value++;
       e.add(new KeyValue(row, family, qualifier, timeStamp, Bytes.toBytes(value)));
-      hrs.getWAL().append(curRegionInfo, TableName.valueOf(tableName), e, 
+      hrs.getWAL().append(curRegionInfo, TableName.valueOf(tableName), e,
         System.currentTimeMillis(), htd, sequenceId);
     }
     hrs.getWAL().sync();
@@ -1325,7 +1326,7 @@ public class TestDistributedLogSplitting {
 
     // wait for abort completes
     this.abortRSAndWaitForRecovery(hrs, zkw, NUM_REGIONS_TO_CREATE);
- 
+
     // verify we got the last value
     LOG.info("Verification Starts...");
     Get g = new Get(row);
@@ -1337,7 +1338,7 @@ public class TestDistributedLogSplitting {
     LOG.info("Verification after flush...");
     TEST_UTIL.getHBaseAdmin().flush(tableName);
     TEST_UTIL.getHBaseAdmin().compact(tableName);
-    
+
     // wait for compaction completes
     TEST_UTIL.waitFor(30000, 200, new Waiter.Predicate<Exception>() {
       @Override
@@ -1356,7 +1357,7 @@ public class TestDistributedLogSplitting {
     return installTable(zkw, tname, fname, nrs, 0);
   }
 
-  HTable installTable(ZooKeeperWatcher zkw, String tname, String fname, int nrs, 
+  HTable installTable(ZooKeeperWatcher zkw, String tname, String fname, int nrs,
       int existingRegions) throws Exception {
     // Create a table with regions
     TableName table = TableName.valueOf(tname);
@@ -1497,8 +1498,11 @@ public class TestDistributedLogSplitting {
   throws IOException {
     int count = 0;
     HLog.Reader in = HLogFactory.createReader(fs, log, conf);
-    while (in.next() != null) {
-      count++;
+    HLog.Entry e;
+    while ((e = in.next()) != null) {
+      if (!WALEdit.isMetaEditFamily(e.getEdit().getKeyValues().get(0))) {
+        count++;
+      }
     }
     return count;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/d44e7df5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 3b98533..9fa430f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -34,8 +34,10 @@ import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyBoolean;
 import static org.mockito.Matchers.anyLong;
 import static org.mockito.Matchers.argThat;
+import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.times;
@@ -86,6 +88,7 @@ import org.apache.hadoop.hbase.MultithreadedTestUtil;
 import org.apache.hadoop.hbase.MultithreadedTestUtil.RepeatingTestThread;
 import org.apache.hadoop.hbase.MultithreadedTestUtil.TestThread;
 import org.apache.hadoop.hbase.NotServingRegionException;
+import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Waiter;
 import org.apache.hadoop.hbase.client.Append;
@@ -114,8 +117,10 @@ import org.apache.hadoop.hbase.monitoring.TaskMonitor;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor;
+import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor;
+import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor;
 import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;
 import org.apache.hadoop.hbase.regionserver.HRegion.RowLock;
 import org.apache.hadoop.hbase.regionserver.TestStore.FaultyFileSystem;
@@ -141,6 +146,7 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
+import org.mockito.ArgumentCaptor;
 import org.mockito.ArgumentMatcher;
 import org.mockito.Mockito;
 
@@ -5481,6 +5487,136 @@ public class TestHRegion {
     this.region = null;
   }
 
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testOpenRegionWrittenToWAL() throws Exception {
+    final ServerName serverName = ServerName.valueOf("testOpenRegionWrittenToWAL", 100, 42);
+    final RegionServerServices rss = spy(TEST_UTIL.createMockRegionServerService(serverName));
+
+    HTableDescriptor htd =
+        new HTableDescriptor(TableName.valueOf("testOpenRegionWrittenToWAL"));
+    htd.addFamily(new HColumnDescriptor(fam1));
+    htd.addFamily(new HColumnDescriptor(fam2));
+
+    HRegionInfo hri = new HRegionInfo(htd.getTableName(),
+      HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY);
+
+    // open the region w/o rss and log, and flush some files
+    HRegion region =
+         HRegion.createHRegion(hri, TEST_UTIL.getDataTestDir(), TEST_UTIL
+            .getConfiguration(), htd);
+    assertNotNull(region);
+
+    // create a file in fam1 for the region before opening in OpenRegionHandler
+    region.put(new Put(Bytes.toBytes("a")).add(fam1, fam1, fam1));
+    region.flushcache();
+    region.close();
+
+    ArgumentCaptor<WALEdit> editCaptor = ArgumentCaptor.forClass(WALEdit.class);
+
+    // capture appendNoSync() calls
+    HLog log = mock(HLog.class);
+    when(rss.getWAL((HRegionInfo) any())).thenReturn(log);
+
+    try {
+      region = HRegion.openHRegion(hri, htd, rss.getWAL(hri),
+        TEST_UTIL.getConfiguration(), rss, null);
+
+      verify(log, times(1)).appendNoSync((HTableDescriptor)any(), (HRegionInfo)any(), (HLogKey)any()
+        , editCaptor.capture(), (AtomicLong)any(), anyBoolean(), (List<KeyValue>)any());
+
+      WALEdit edit = editCaptor.getValue();
+      assertNotNull(edit);
+      assertNotNull(edit.getKeyValues());
+      assertEquals(1, edit.getKeyValues().size());
+      RegionEventDescriptor desc = WALEdit.getRegionEventDescriptor(edit.getKeyValues().get(0));
+      assertNotNull(desc);
+
+      LOG.info("RegionEventDescriptor from WAL: " + desc);
+
+      assertEquals(RegionEventDescriptor.EventType.REGION_OPEN, desc.getEventType());
+      assertTrue(Bytes.equals(desc.getTableName().toByteArray(), htd.getName()));
+      assertTrue(Bytes.equals(desc.getEncodedRegionName().toByteArray(),
+        hri.getEncodedNameAsBytes()));
+      assertTrue(desc.getLogSequenceNumber() > 0);
+      assertEquals(serverName, ProtobufUtil.toServerName(desc.getServer()));
+      assertEquals(2, desc.getStoresCount());
+
+      StoreDescriptor store = desc.getStores(0);
+      assertTrue(Bytes.equals(store.getFamilyName().toByteArray(), fam1));
+      assertEquals(store.getStoreHomeDir(), Bytes.toString(fam1));
+      assertEquals(1, store.getStoreFileCount()); // one store file
+      assertFalse(store.getStoreFile(0).contains("/")); // ensure path is relative
+
+      store = desc.getStores(1);
+      assertTrue(Bytes.equals(store.getFamilyName().toByteArray(), fam2));
+      assertEquals(store.getStoreHomeDir(), Bytes.toString(fam2));
+      assertEquals(0, store.getStoreFileCount()); // no store files
+
+    } finally {
+      HRegion.closeHRegion(region);
+    }
+  }
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testCloseRegionWrittenToWAL() throws Exception {
+    final ServerName serverName = ServerName.valueOf("testCloseRegionWrittenToWAL", 100, 42);
+    final RegionServerServices rss = spy(TEST_UTIL.createMockRegionServerService(serverName));
+
+    HTableDescriptor htd =
+        new HTableDescriptor(TableName.valueOf("testCloseRegionWrittenToWAL"));
+    htd.addFamily(new HColumnDescriptor(fam1));
+    htd.addFamily(new HColumnDescriptor(fam2));
+
+    HRegionInfo hri = new HRegionInfo(htd.getTableName(),
+      HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY);
+
+    ArgumentCaptor<WALEdit> editCaptor = ArgumentCaptor.forClass(WALEdit.class);
+
+    // capture appendNoSync() calls
+    HLog log = mock(HLog.class);
+    when(rss.getWAL((HRegionInfo) any())).thenReturn(log);
+
+    // open a region first so that it can be closed later
+    region = HRegion.openHRegion(hri, htd, rss.getWAL(hri),
+      TEST_UTIL.getConfiguration(), rss, null);
+
+    // close the region
+    region.close(false);
+
+    // 2 times: once for the region open, once for the region close
+    verify(log, times(2)).appendNoSync((HTableDescriptor)any(), (HRegionInfo)any(), (HLogKey)any(),
+      editCaptor.capture(), (AtomicLong)any(), anyBoolean(), (List<KeyValue>)any());
+
+    WALEdit edit = editCaptor.getAllValues().get(1);
+    assertNotNull(edit);
+    assertNotNull(edit.getKeyValues());
+    assertEquals(1, edit.getKeyValues().size());
+    RegionEventDescriptor desc = WALEdit.getRegionEventDescriptor(edit.getKeyValues().get(0));
+    assertNotNull(desc);
+
+    LOG.info("RegionEventDescriptor from WAL: " + desc);
+
+    assertEquals(RegionEventDescriptor.EventType.REGION_CLOSE, desc.getEventType());
+    assertTrue(Bytes.equals(desc.getTableName().toByteArray(), htd.getName()));
+    assertTrue(Bytes.equals(desc.getEncodedRegionName().toByteArray(),
+      hri.getEncodedNameAsBytes()));
+    assertTrue(desc.getLogSequenceNumber() > 0);
+    assertEquals(serverName, ProtobufUtil.toServerName(desc.getServer()));
+    assertEquals(2, desc.getStoresCount());
+
+    StoreDescriptor store = desc.getStores(0);
+    assertTrue(Bytes.equals(store.getFamilyName().toByteArray(), fam1));
+    assertEquals(store.getStoreHomeDir(), Bytes.toString(fam1));
+    assertEquals(0, store.getStoreFileCount()); // no store files
+
+    store = desc.getStores(1);
+    assertTrue(Bytes.equals(store.getFamilyName().toByteArray(), fam2));
+    assertEquals(store.getStoreHomeDir(), Bytes.toString(fam2));
+    assertEquals(0, store.getStoreFileCount()); // no store files
+  }
+
   private static HRegion initHRegion(byte[] tableName, String callingMethod,
       byte[]... families) throws IOException {
     return initHRegion(tableName, callingMethod, HBaseConfiguration.create(),

http://git-wip-us.apache.org/repos/asf/hbase/blob/d44e7df5/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
index ea368df..0edad8b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.regionserver.wal;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
+import static org.mockito.Mockito.when;
 
 import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
@@ -100,7 +101,7 @@ public class TestWALReplay {
   private FileSystem fs;
   private Configuration conf;
   private RecoveryMode mode;
-  
+
 
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
@@ -131,7 +132,7 @@ public class TestWALReplay {
     if (TEST_UTIL.getDFSCluster().getFileSystem().exists(this.hbaseRootDir)) {
       TEST_UTIL.getDFSCluster().getFileSystem().delete(this.hbaseRootDir, true);
     }
-    this.mode = (conf.getBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false) ? 
+    this.mode = (conf.getBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false) ?
         RecoveryMode.LOG_REPLAY : RecoveryMode.LOG_SPLITTING);
   }
 
@@ -152,7 +153,7 @@ public class TestWALReplay {
   }
 
   /**
-   * 
+   *
    * @throws Exception
    */
   @Test
@@ -354,6 +355,7 @@ public class TestWALReplay {
     User user = HBaseTestingUtility.getDifferentUser(newConf,
         tableName.getNameAsString());
     user.runAs(new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         runWALSplit(newConf);
         HLog wal2 = createWAL(newConf);
@@ -425,6 +427,7 @@ public class TestWALReplay {
     User user = HBaseTestingUtility.getDifferentUser(newConf,
         tableName.getNameAsString());
     user.runAs(new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         runWALSplit(newConf);
         HLog wal2 = createWAL(newConf);
@@ -518,6 +521,7 @@ public class TestWALReplay {
     User user = HBaseTestingUtility.getDifferentUser(newConf,
       tableName.getNameAsString());
     user.runAs(new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         runWALSplit(newConf);
         FileSystem newFS = FileSystem.get(newConf);
@@ -669,6 +673,7 @@ public class TestWALReplay {
     HLog wal = createWAL(this.conf);
     RegionServerServices rsServices = Mockito.mock(RegionServerServices.class);
     Mockito.doReturn(false).when(rsServices).isAborted();
+    when(rsServices.getServerName()).thenReturn(ServerName.valueOf("foo", 10, 10));
     Configuration customConf = new Configuration(this.conf);
     customConf.set(DefaultStoreEngine.DEFAULT_STORE_FLUSHER_CLASS_KEY,
         CustomStoreFlusher.class.getName());
@@ -802,6 +807,7 @@ public class TestWALReplay {
     User user = HBaseTestingUtility.getDifferentUser(newConf,
       ".replay.wal.secondtime");
     user.runAs(new PrivilegedExceptionAction() {
+      @Override
       public Object run() throws Exception {
         runWALSplit(newConf);
         FileSystem newFS = FileSystem.get(newConf);
@@ -813,6 +819,7 @@ public class TestWALReplay {
         try {
           final HRegion region =
               new HRegion(basedir, newWal, newFS, newConf, hri, htd, null) {
+            @Override
             protected FlushResult internalFlushcache(
                 final HLog wal, final long myseqid, MonitoredTask status)
             throws IOException {
@@ -886,7 +893,7 @@ public class TestWALReplay {
     for (FileStatus fileStatus : listStatus1) {
       editCount = Integer.parseInt(fileStatus.getPath().getName());
     }
-    // The sequence number should be same 
+    // The sequence number should be same
     assertEquals(
         "The sequence number of the recoverd.edits and the current edit seq should be same",
         lastestSeqNumber, editCount);
@@ -914,7 +921,7 @@ public class TestWALReplay {
     htd.addFamily(a);
     return htd;
   }
-  
+
   private MockHLog createMockWAL(Configuration conf) throws IOException {
     MockHLog wal = new MockHLog(FileSystem.get(conf), hbaseRootDir, logName, conf);
     // Set down maximum recovery so we dfsclient doesn't linger retrying something
@@ -940,7 +947,7 @@ public class TestWALReplay {
     @Override
     public void requestDelayedFlush(HRegion region, long when) {
       // TODO Auto-generated method stub
-      
+
     }
 
     @Override
@@ -1021,7 +1028,7 @@ public class TestWALReplay {
    * @throws IOException
    */
   private HLog createWAL(final Configuration c) throws IOException {
-    HLog wal = HLogFactory.createHLog(FileSystem.get(c), 
+    HLog wal = HLogFactory.createHLog(FileSystem.get(c),
         hbaseRootDir, logName, c);
     // Set down maximum recovery so we dfsclient doesn't linger retrying something
     // long gone.


[2/3] HBASE-11512 Write region open/close events to WAL

Posted by en...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase/blob/d44e7df5/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
index c569b50..af61d47 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
@@ -7600,6 +7600,2419 @@ public final class WALProtos {
     // @@protoc_insertion_point(class_scope:FlushDescriptor)
   }
 
+  public interface RegionEventDescriptorOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required .RegionEventDescriptor.EventType event_type = 1;
+    /**
+     * <code>required .RegionEventDescriptor.EventType event_type = 1;</code>
+     */
+    boolean hasEventType();
+    /**
+     * <code>required .RegionEventDescriptor.EventType event_type = 1;</code>
+     */
+    org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType();
+
+    // required bytes table_name = 2;
+    /**
+     * <code>required bytes table_name = 2;</code>
+     */
+    boolean hasTableName();
+    /**
+     * <code>required bytes table_name = 2;</code>
+     */
+    com.google.protobuf.ByteString getTableName();
+
+    // required bytes encoded_region_name = 3;
+    /**
+     * <code>required bytes encoded_region_name = 3;</code>
+     */
+    boolean hasEncodedRegionName();
+    /**
+     * <code>required bytes encoded_region_name = 3;</code>
+     */
+    com.google.protobuf.ByteString getEncodedRegionName();
+
+    // optional uint64 log_sequence_number = 4;
+    /**
+     * <code>optional uint64 log_sequence_number = 4;</code>
+     */
+    boolean hasLogSequenceNumber();
+    /**
+     * <code>optional uint64 log_sequence_number = 4;</code>
+     */
+    long getLogSequenceNumber();
+
+    // repeated .RegionEventDescriptor.StoreDescriptor stores = 5;
+    /**
+     * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+     */
+    java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor> 
+        getStoresList();
+    /**
+     * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+     */
+    org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor getStores(int index);
+    /**
+     * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+     */
+    int getStoresCount();
+    /**
+     * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+     */
+    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptorOrBuilder> 
+        getStoresOrBuilderList();
+    /**
+     * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+     */
+    org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptorOrBuilder getStoresOrBuilder(
+        int index);
+
+    // optional .ServerName server = 6;
+    /**
+     * <code>optional .ServerName server = 6;</code>
+     *
+     * <pre>
+     * Server who opened the region
+     * </pre>
+     */
+    boolean hasServer();
+    /**
+     * <code>optional .ServerName server = 6;</code>
+     *
+     * <pre>
+     * Server who opened the region
+     * </pre>
+     */
+    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer();
+    /**
+     * <code>optional .ServerName server = 6;</code>
+     *
+     * <pre>
+     * Server who opened the region
+     * </pre>
+     */
+    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder();
+  }
+  /**
+   * Protobuf type {@code RegionEventDescriptor}
+   *
+   * <pre>
+   **
+   * Special WAL entry to hold everything related to a region event (open/close).
+   * </pre>
+   */
+  public static final class RegionEventDescriptor extends
+      com.google.protobuf.GeneratedMessage
+      implements RegionEventDescriptorOrBuilder {
+    // Use RegionEventDescriptor.newBuilder() to construct.
+    private RegionEventDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private RegionEventDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final RegionEventDescriptor defaultInstance;
+    public static RegionEventDescriptor getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public RegionEventDescriptor getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private RegionEventDescriptor(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              int rawValue = input.readEnum();
+              org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType value = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.valueOf(rawValue);
+              if (value == null) {
+                unknownFields.mergeVarintField(1, rawValue);
+              } else {
+                bitField0_ |= 0x00000001;
+                eventType_ = value;
+              }
+              break;
+            }
+            case 18: {
+              bitField0_ |= 0x00000002;
+              tableName_ = input.readBytes();
+              break;
+            }
+            case 26: {
+              bitField0_ |= 0x00000004;
+              encodedRegionName_ = input.readBytes();
+              break;
+            }
+            case 32: {
+              bitField0_ |= 0x00000008;
+              logSequenceNumber_ = input.readUInt64();
+              break;
+            }
+            case 42: {
+              if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
+                stores_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor>();
+                mutable_bitField0_ |= 0x00000010;
+              }
+              stores_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.PARSER, extensionRegistry));
+              break;
+            }
+            case 50: {
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null;
+              if (((bitField0_ & 0x00000010) == 0x00000010)) {
+                subBuilder = server_.toBuilder();
+              }
+              server_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(server_);
+                server_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00000010;
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
+          stores_ = java.util.Collections.unmodifiableList(stores_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<RegionEventDescriptor> PARSER =
+        new com.google.protobuf.AbstractParser<RegionEventDescriptor>() {
+      public RegionEventDescriptor parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new RegionEventDescriptor(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<RegionEventDescriptor> getParserForType() {
+      return PARSER;
+    }
+
+    /**
+     * Protobuf enum {@code RegionEventDescriptor.EventType}
+     */
+    public enum EventType
+        implements com.google.protobuf.ProtocolMessageEnum {
+      /**
+       * <code>REGION_OPEN = 0;</code>
+       */
+      REGION_OPEN(0, 0),
+      /**
+       * <code>REGION_CLOSE = 1;</code>
+       */
+      REGION_CLOSE(1, 1),
+      ;
+
+      /**
+       * <code>REGION_OPEN = 0;</code>
+       */
+      public static final int REGION_OPEN_VALUE = 0;
+      /**
+       * <code>REGION_CLOSE = 1;</code>
+       */
+      public static final int REGION_CLOSE_VALUE = 1;
+
+
+      public final int getNumber() { return value; }
+
+      public static EventType valueOf(int value) {
+        switch (value) {
+          case 0: return REGION_OPEN;
+          case 1: return REGION_CLOSE;
+          default: return null;
+        }
+      }
+
+      public static com.google.protobuf.Internal.EnumLiteMap<EventType>
+          internalGetValueMap() {
+        return internalValueMap;
+      }
+      private static com.google.protobuf.Internal.EnumLiteMap<EventType>
+          internalValueMap =
+            new com.google.protobuf.Internal.EnumLiteMap<EventType>() {
+              public EventType findValueByNumber(int number) {
+                return EventType.valueOf(number);
+              }
+            };
+
+      public final com.google.protobuf.Descriptors.EnumValueDescriptor
+          getValueDescriptor() {
+        return getDescriptor().getValues().get(index);
+      }
+      public final com.google.protobuf.Descriptors.EnumDescriptor
+          getDescriptorForType() {
+        return getDescriptor();
+      }
+      public static final com.google.protobuf.Descriptors.EnumDescriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.getDescriptor().getEnumTypes().get(0);
+      }
+
+      private static final EventType[] VALUES = values();
+
+      public static EventType valueOf(
+          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+        if (desc.getType() != getDescriptor()) {
+          throw new java.lang.IllegalArgumentException(
+            "EnumValueDescriptor is not for this type.");
+        }
+        return VALUES[desc.getIndex()];
+      }
+
+      private final int index;
+      private final int value;
+
+      private EventType(int index, int value) {
+        this.index = index;
+        this.value = value;
+      }
+
+      // @@protoc_insertion_point(enum_scope:RegionEventDescriptor.EventType)
+    }
+
+    public interface StoreDescriptorOrBuilder
+        extends com.google.protobuf.MessageOrBuilder {
+
+      // required bytes family_name = 1;
+      /**
+       * <code>required bytes family_name = 1;</code>
+       */
+      boolean hasFamilyName();
+      /**
+       * <code>required bytes family_name = 1;</code>
+       */
+      com.google.protobuf.ByteString getFamilyName();
+
+      // required string store_home_dir = 2;
+      /**
+       * <code>required string store_home_dir = 2;</code>
+       *
+       * <pre>
+       *relative to region dir
+       * </pre>
+       */
+      boolean hasStoreHomeDir();
+      /**
+       * <code>required string store_home_dir = 2;</code>
+       *
+       * <pre>
+       *relative to region dir
+       * </pre>
+       */
+      java.lang.String getStoreHomeDir();
+      /**
+       * <code>required string store_home_dir = 2;</code>
+       *
+       * <pre>
+       *relative to region dir
+       * </pre>
+       */
+      com.google.protobuf.ByteString
+          getStoreHomeDirBytes();
+
+      // repeated string store_file = 3;
+      /**
+       * <code>repeated string store_file = 3;</code>
+       *
+       * <pre>
+       * relative to store dir
+       * </pre>
+       */
+      java.util.List<java.lang.String>
+      getStoreFileList();
+      /**
+       * <code>repeated string store_file = 3;</code>
+       *
+       * <pre>
+       * relative to store dir
+       * </pre>
+       */
+      int getStoreFileCount();
+      /**
+       * <code>repeated string store_file = 3;</code>
+       *
+       * <pre>
+       * relative to store dir
+       * </pre>
+       */
+      java.lang.String getStoreFile(int index);
+      /**
+       * <code>repeated string store_file = 3;</code>
+       *
+       * <pre>
+       * relative to store dir
+       * </pre>
+       */
+      com.google.protobuf.ByteString
+          getStoreFileBytes(int index);
+    }
+    /**
+     * Protobuf type {@code RegionEventDescriptor.StoreDescriptor}
+     */
+    public static final class StoreDescriptor extends
+        com.google.protobuf.GeneratedMessage
+        implements StoreDescriptorOrBuilder {
+      // Use StoreDescriptor.newBuilder() to construct.
+      private StoreDescriptor(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+        super(builder);
+        this.unknownFields = builder.getUnknownFields();
+      }
+      private StoreDescriptor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+      private static final StoreDescriptor defaultInstance;
+      public static StoreDescriptor getDefaultInstance() {
+        return defaultInstance;
+      }
+
+      public StoreDescriptor getDefaultInstanceForType() {
+        return defaultInstance;
+      }
+
+      private final com.google.protobuf.UnknownFieldSet unknownFields;
+      @java.lang.Override
+      public final com.google.protobuf.UnknownFieldSet
+          getUnknownFields() {
+        return this.unknownFields;
+      }
+      private StoreDescriptor(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        initFields();
+        int mutable_bitField0_ = 0;
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+            com.google.protobuf.UnknownFieldSet.newBuilder();
+        try {
+          boolean done = false;
+          while (!done) {
+            int tag = input.readTag();
+            switch (tag) {
+              case 0:
+                done = true;
+                break;
+              default: {
+                if (!parseUnknownField(input, unknownFields,
+                                       extensionRegistry, tag)) {
+                  done = true;
+                }
+                break;
+              }
+              case 10: {
+                bitField0_ |= 0x00000001;
+                familyName_ = input.readBytes();
+                break;
+              }
+              case 18: {
+                bitField0_ |= 0x00000002;
+                storeHomeDir_ = input.readBytes();
+                break;
+              }
+              case 26: {
+                if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
+                  storeFile_ = new com.google.protobuf.LazyStringArrayList();
+                  mutable_bitField0_ |= 0x00000004;
+                }
+                storeFile_.add(input.readBytes());
+                break;
+              }
+            }
+          }
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          throw e.setUnfinishedMessage(this);
+        } catch (java.io.IOException e) {
+          throw new com.google.protobuf.InvalidProtocolBufferException(
+              e.getMessage()).setUnfinishedMessage(this);
+        } finally {
+          if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
+            storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList(storeFile_);
+          }
+          this.unknownFields = unknownFields.build();
+          makeExtensionsImmutable();
+        }
+      }
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_StoreDescriptor_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_StoreDescriptor_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder.class);
+      }
+
+      public static com.google.protobuf.Parser<StoreDescriptor> PARSER =
+          new com.google.protobuf.AbstractParser<StoreDescriptor>() {
+        public StoreDescriptor parsePartialFrom(
+            com.google.protobuf.CodedInputStream input,
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws com.google.protobuf.InvalidProtocolBufferException {
+          return new StoreDescriptor(input, extensionRegistry);
+        }
+      };
+
+      @java.lang.Override
+      public com.google.protobuf.Parser<StoreDescriptor> getParserForType() {
+        return PARSER;
+      }
+
+      private int bitField0_;
+      // required bytes family_name = 1;
+      public static final int FAMILY_NAME_FIELD_NUMBER = 1;
+      private com.google.protobuf.ByteString familyName_;
+      /**
+       * <code>required bytes family_name = 1;</code>
+       */
+      public boolean hasFamilyName() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required bytes family_name = 1;</code>
+       */
+      public com.google.protobuf.ByteString getFamilyName() {
+        return familyName_;
+      }
+
+      // required string store_home_dir = 2;
+      public static final int STORE_HOME_DIR_FIELD_NUMBER = 2;
+      private java.lang.Object storeHomeDir_;
+      /**
+       * <code>required string store_home_dir = 2;</code>
+       *
+       * <pre>
+       *relative to region dir
+       * </pre>
+       */
+      public boolean hasStoreHomeDir() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      /**
+       * <code>required string store_home_dir = 2;</code>
+       *
+       * <pre>
+       *relative to region dir
+       * </pre>
+       */
+      public java.lang.String getStoreHomeDir() {
+        java.lang.Object ref = storeHomeDir_;
+        if (ref instanceof java.lang.String) {
+          return (java.lang.String) ref;
+        } else {
+          com.google.protobuf.ByteString bs = 
+              (com.google.protobuf.ByteString) ref;
+          java.lang.String s = bs.toStringUtf8();
+          if (bs.isValidUtf8()) {
+            storeHomeDir_ = s;
+          }
+          return s;
+        }
+      }
+      /**
+       * <code>required string store_home_dir = 2;</code>
+       *
+       * <pre>
+       *relative to region dir
+       * </pre>
+       */
+      public com.google.protobuf.ByteString
+          getStoreHomeDirBytes() {
+        java.lang.Object ref = storeHomeDir_;
+        if (ref instanceof java.lang.String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          storeHomeDir_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+
+      // repeated string store_file = 3;
+      public static final int STORE_FILE_FIELD_NUMBER = 3;
+      private com.google.protobuf.LazyStringList storeFile_;
+      /**
+       * <code>repeated string store_file = 3;</code>
+       *
+       * <pre>
+       * relative to store dir
+       * </pre>
+       */
+      public java.util.List<java.lang.String>
+          getStoreFileList() {
+        return storeFile_;
+      }
+      /**
+       * <code>repeated string store_file = 3;</code>
+       *
+       * <pre>
+       * relative to store dir
+       * </pre>
+       */
+      public int getStoreFileCount() {
+        return storeFile_.size();
+      }
+      /**
+       * <code>repeated string store_file = 3;</code>
+       *
+       * <pre>
+       * relative to store dir
+       * </pre>
+       */
+      public java.lang.String getStoreFile(int index) {
+        return storeFile_.get(index);
+      }
+      /**
+       * <code>repeated string store_file = 3;</code>
+       *
+       * <pre>
+       * relative to store dir
+       * </pre>
+       */
+      public com.google.protobuf.ByteString
+          getStoreFileBytes(int index) {
+        return storeFile_.getByteString(index);
+      }
+
+      private void initFields() {
+        familyName_ = com.google.protobuf.ByteString.EMPTY;
+        storeHomeDir_ = "";
+        storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+      }
+      private byte memoizedIsInitialized = -1;
+      public final boolean isInitialized() {
+        byte isInitialized = memoizedIsInitialized;
+        if (isInitialized != -1) return isInitialized == 1;
+
+        if (!hasFamilyName()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+        if (!hasStoreHomeDir()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+        memoizedIsInitialized = 1;
+        return true;
+      }
+
+      public void writeTo(com.google.protobuf.CodedOutputStream output)
+                          throws java.io.IOException {
+        getSerializedSize();
+        if (((bitField0_ & 0x00000001) == 0x00000001)) {
+          output.writeBytes(1, familyName_);
+        }
+        if (((bitField0_ & 0x00000002) == 0x00000002)) {
+          output.writeBytes(2, getStoreHomeDirBytes());
+        }
+        for (int i = 0; i < storeFile_.size(); i++) {
+          output.writeBytes(3, storeFile_.getByteString(i));
+        }
+        getUnknownFields().writeTo(output);
+      }
+
+      private int memoizedSerializedSize = -1;
+      public int getSerializedSize() {
+        int size = memoizedSerializedSize;
+        if (size != -1) return size;
+
+        size = 0;
+        if (((bitField0_ & 0x00000001) == 0x00000001)) {
+          size += com.google.protobuf.CodedOutputStream
+            .computeBytesSize(1, familyName_);
+        }
+        if (((bitField0_ & 0x00000002) == 0x00000002)) {
+          size += com.google.protobuf.CodedOutputStream
+            .computeBytesSize(2, getStoreHomeDirBytes());
+        }
+        {
+          int dataSize = 0;
+          for (int i = 0; i < storeFile_.size(); i++) {
+            dataSize += com.google.protobuf.CodedOutputStream
+              .computeBytesSizeNoTag(storeFile_.getByteString(i));
+          }
+          size += dataSize;
+          size += 1 * getStoreFileList().size();
+        }
+        size += getUnknownFields().getSerializedSize();
+        memoizedSerializedSize = size;
+        return size;
+      }
+
+      private static final long serialVersionUID = 0L;
+      @java.lang.Override
+      protected java.lang.Object writeReplace()
+          throws java.io.ObjectStreamException {
+        return super.writeReplace();
+      }
+
+      @java.lang.Override
+      public boolean equals(final java.lang.Object obj) {
+        if (obj == this) {
+         return true;
+        }
+        if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor)) {
+          return super.equals(obj);
+        }
+        org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor) obj;
+
+        boolean result = true;
+        result = result && (hasFamilyName() == other.hasFamilyName());
+        if (hasFamilyName()) {
+          result = result && getFamilyName()
+              .equals(other.getFamilyName());
+        }
+        result = result && (hasStoreHomeDir() == other.hasStoreHomeDir());
+        if (hasStoreHomeDir()) {
+          result = result && getStoreHomeDir()
+              .equals(other.getStoreHomeDir());
+        }
+        result = result && getStoreFileList()
+            .equals(other.getStoreFileList());
+        result = result &&
+            getUnknownFields().equals(other.getUnknownFields());
+        return result;
+      }
+
+      private int memoizedHashCode = 0;
+      @java.lang.Override
+      public int hashCode() {
+        if (memoizedHashCode != 0) {
+          return memoizedHashCode;
+        }
+        int hash = 41;
+        hash = (19 * hash) + getDescriptorForType().hashCode();
+        if (hasFamilyName()) {
+          hash = (37 * hash) + FAMILY_NAME_FIELD_NUMBER;
+          hash = (53 * hash) + getFamilyName().hashCode();
+        }
+        if (hasStoreHomeDir()) {
+          hash = (37 * hash) + STORE_HOME_DIR_FIELD_NUMBER;
+          hash = (53 * hash) + getStoreHomeDir().hashCode();
+        }
+        if (getStoreFileCount() > 0) {
+          hash = (37 * hash) + STORE_FILE_FIELD_NUMBER;
+          hash = (53 * hash) + getStoreFileList().hashCode();
+        }
+        hash = (29 * hash) + getUnknownFields().hashCode();
+        memoizedHashCode = hash;
+        return hash;
+      }
+
+      public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parseFrom(
+          com.google.protobuf.ByteString data)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return PARSER.parseFrom(data);
+      }
+      public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parseFrom(
+          com.google.protobuf.ByteString data,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return PARSER.parseFrom(data, extensionRegistry);
+      }
+      public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parseFrom(byte[] data)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return PARSER.parseFrom(data);
+      }
+      public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parseFrom(
+          byte[] data,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return PARSER.parseFrom(data, extensionRegistry);
+      }
+      public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parseFrom(java.io.InputStream input)
+          throws java.io.IOException {
+        return PARSER.parseFrom(input);
+      }
+      public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parseFrom(
+          java.io.InputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        return PARSER.parseFrom(input, extensionRegistry);
+      }
+      public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parseDelimitedFrom(java.io.InputStream input)
+          throws java.io.IOException {
+        return PARSER.parseDelimitedFrom(input);
+      }
+      public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parseDelimitedFrom(
+          java.io.InputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        return PARSER.parseDelimitedFrom(input, extensionRegistry);
+      }
+      public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parseFrom(
+          com.google.protobuf.CodedInputStream input)
+          throws java.io.IOException {
+        return PARSER.parseFrom(input);
+      }
+      public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parseFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        return PARSER.parseFrom(input, extensionRegistry);
+      }
+
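+      /**
+       * Usage sketch (illustrative only; the family name and file names below
+       * are hypothetical, not values produced by HBase):
+       * <pre>
+       * StoreDescriptor store = StoreDescriptor.newBuilder()
+       *     .setFamilyName(com.google.protobuf.ByteString.copyFromUtf8("cf"))
+       *     .setStoreHomeDir("cf")            // relative to the region dir
+       *     .addStoreFile("0123456789abcdef") // relative to the store dir
+       *     .build();
+       * </pre>
+       */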
+      public static Builder newBuilder() { return Builder.create(); }
+      public Builder newBuilderForType() { return newBuilder(); }
+      public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor prototype) {
+        return newBuilder().mergeFrom(prototype);
+      }
+      public Builder toBuilder() { return newBuilder(this); }
+
+      @java.lang.Override
+      protected Builder newBuilderForType(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        Builder builder = new Builder(parent);
+        return builder;
+      }
+      /**
+       * Protobuf type {@code RegionEventDescriptor.StoreDescriptor}
+       */
+      public static final class Builder extends
+          com.google.protobuf.GeneratedMessage.Builder<Builder>
+         implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptorOrBuilder {
+        public static final com.google.protobuf.Descriptors.Descriptor
+            getDescriptor() {
+          return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_StoreDescriptor_descriptor;
+        }
+
+        protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+            internalGetFieldAccessorTable() {
+          return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_StoreDescriptor_fieldAccessorTable
+              .ensureFieldAccessorsInitialized(
+                  org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder.class);
+        }
+
+        // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.newBuilder()
+        private Builder() {
+          maybeForceBuilderInitialization();
+        }
+
+        private Builder(
+            com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+          super(parent);
+          maybeForceBuilderInitialization();
+        }
+        private void maybeForceBuilderInitialization() {
+          if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+          }
+        }
+        private static Builder create() {
+          return new Builder();
+        }
+
+        public Builder clear() {
+          super.clear();
+          familyName_ = com.google.protobuf.ByteString.EMPTY;
+          bitField0_ = (bitField0_ & ~0x00000001);
+          storeHomeDir_ = "";
+          bitField0_ = (bitField0_ & ~0x00000002);
+          storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+          bitField0_ = (bitField0_ & ~0x00000004);
+          return this;
+        }
+
+        public Builder clone() {
+          return create().mergeFrom(buildPartial());
+        }
+
+        public com.google.protobuf.Descriptors.Descriptor
+            getDescriptorForType() {
+          return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_StoreDescriptor_descriptor;
+        }
+
+        public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor getDefaultInstanceForType() {
+          return org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.getDefaultInstance();
+        }
+
+        public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor build() {
+          org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor result = buildPartial();
+          if (!result.isInitialized()) {
+            throw newUninitializedMessageException(result);
+          }
+          return result;
+        }
+
+        public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor buildPartial() {
+          org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor(this);
+          int from_bitField0_ = bitField0_;
+          int to_bitField0_ = 0;
+          if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+            to_bitField0_ |= 0x00000001;
+          }
+          result.familyName_ = familyName_;
+          if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+            to_bitField0_ |= 0x00000002;
+          }
+          result.storeHomeDir_ = storeHomeDir_;
+          if (((bitField0_ & 0x00000004) == 0x00000004)) {
+            storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList(
+                storeFile_);
+            bitField0_ = (bitField0_ & ~0x00000004);
+          }
+          result.storeFile_ = storeFile_;
+          result.bitField0_ = to_bitField0_;
+          onBuilt();
+          return result;
+        }
+
+        public Builder mergeFrom(com.google.protobuf.Message other) {
+          if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor) {
+            return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor)other);
+          } else {
+            super.mergeFrom(other);
+            return this;
+          }
+        }
+
+        public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor other) {
+          if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.getDefaultInstance()) return this;
+          if (other.hasFamilyName()) {
+            setFamilyName(other.getFamilyName());
+          }
+          if (other.hasStoreHomeDir()) {
+            bitField0_ |= 0x00000002;
+            storeHomeDir_ = other.storeHomeDir_;
+            onChanged();
+          }
+          if (!other.storeFile_.isEmpty()) {
+            if (storeFile_.isEmpty()) {
+              storeFile_ = other.storeFile_;
+              bitField0_ = (bitField0_ & ~0x00000004);
+            } else {
+              ensureStoreFileIsMutable();
+              storeFile_.addAll(other.storeFile_);
+            }
+            onChanged();
+          }
+          this.mergeUnknownFields(other.getUnknownFields());
+          return this;
+        }
+
+        public final boolean isInitialized() {
+          if (!hasFamilyName()) {
+            return false;
+          }
+          if (!hasStoreHomeDir()) {
+            return false;
+          }
+          return true;
+        }
+
+        public Builder mergeFrom(
+            com.google.protobuf.CodedInputStream input,
+            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+            throws java.io.IOException {
+          org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor parsedMessage = null;
+          try {
+            parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+            parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor) e.getUnfinishedMessage();
+            throw e;
+          } finally {
+            if (parsedMessage != null) {
+              mergeFrom(parsedMessage);
+            }
+          }
+          return this;
+        }
+        private int bitField0_;
+
+        // required bytes family_name = 1;
+        private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY;
+        /**
+         * <code>required bytes family_name = 1;</code>
+         */
+        public boolean hasFamilyName() {
+          return ((bitField0_ & 0x00000001) == 0x00000001);
+        }
+        /**
+         * <code>required bytes family_name = 1;</code>
+         */
+        public com.google.protobuf.ByteString getFamilyName() {
+          return familyName_;
+        }
+        /**
+         * <code>required bytes family_name = 1;</code>
+         */
+        public Builder setFamilyName(com.google.protobuf.ByteString value) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          bitField0_ |= 0x00000001;
+          familyName_ = value;
+          onChanged();
+          return this;
+        }
+        /**
+         * <code>required bytes family_name = 1;</code>
+         */
+        public Builder clearFamilyName() {
+          bitField0_ = (bitField0_ & ~0x00000001);
+          familyName_ = getDefaultInstance().getFamilyName();
+          onChanged();
+          return this;
+        }
+
+        // required string store_home_dir = 2;
+        private java.lang.Object storeHomeDir_ = "";
+        /**
+         * <code>required string store_home_dir = 2;</code>
+         *
+         * <pre>
+         *relative to region dir
+         * </pre>
+         */
+        public boolean hasStoreHomeDir() {
+          return ((bitField0_ & 0x00000002) == 0x00000002);
+        }
+        /**
+         * <code>required string store_home_dir = 2;</code>
+         *
+         * <pre>
+         *relative to region dir
+         * </pre>
+         */
+        public java.lang.String getStoreHomeDir() {
+          java.lang.Object ref = storeHomeDir_;
+          if (!(ref instanceof java.lang.String)) {
+            java.lang.String s = ((com.google.protobuf.ByteString) ref)
+                .toStringUtf8();
+            storeHomeDir_ = s;
+            return s;
+          } else {
+            return (java.lang.String) ref;
+          }
+        }
+        /**
+         * <code>required string store_home_dir = 2;</code>
+         *
+         * <pre>
+         *relative to region dir
+         * </pre>
+         */
+        public com.google.protobuf.ByteString
+            getStoreHomeDirBytes() {
+          java.lang.Object ref = storeHomeDir_;
+          if (ref instanceof String) {
+            com.google.protobuf.ByteString b = 
+                com.google.protobuf.ByteString.copyFromUtf8(
+                    (java.lang.String) ref);
+            storeHomeDir_ = b;
+            return b;
+          } else {
+            return (com.google.protobuf.ByteString) ref;
+          }
+        }
+        /**
+         * <code>required string store_home_dir = 2;</code>
+         *
+         * <pre>
+         *relative to region dir
+         * </pre>
+         */
+        public Builder setStoreHomeDir(
+            java.lang.String value) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          bitField0_ |= 0x00000002;
+          storeHomeDir_ = value;
+          onChanged();
+          return this;
+        }
+        /**
+         * <code>required string store_home_dir = 2;</code>
+         *
+         * <pre>
+         *relative to region dir
+         * </pre>
+         */
+        public Builder clearStoreHomeDir() {
+          bitField0_ = (bitField0_ & ~0x00000002);
+          storeHomeDir_ = getDefaultInstance().getStoreHomeDir();
+          onChanged();
+          return this;
+        }
+        /**
+         * <code>required string store_home_dir = 2;</code>
+         *
+         * <pre>
+         *relative to region dir
+         * </pre>
+         */
+        public Builder setStoreHomeDirBytes(
+            com.google.protobuf.ByteString value) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          bitField0_ |= 0x00000002;
+          storeHomeDir_ = value;
+          onChanged();
+          return this;
+        }
+
+        // repeated string store_file = 3;
+        private com.google.protobuf.LazyStringList storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+        private void ensureStoreFileIsMutable() {
+          if (!((bitField0_ & 0x00000004) == 0x00000004)) {
+            storeFile_ = new com.google.protobuf.LazyStringArrayList(storeFile_);
+            bitField0_ |= 0x00000004;
+           }
+        }
+        /**
+         * <code>repeated string store_file = 3;</code>
+         *
+         * <pre>
+         * relative to store dir
+         * </pre>
+         */
+        public java.util.List<java.lang.String>
+            getStoreFileList() {
+          return java.util.Collections.unmodifiableList(storeFile_);
+        }
+        /**
+         * <code>repeated string store_file = 3;</code>
+         *
+         * <pre>
+         * relative to store dir
+         * </pre>
+         */
+        public int getStoreFileCount() {
+          return storeFile_.size();
+        }
+        /**
+         * <code>repeated string store_file = 3;</code>
+         *
+         * <pre>
+         * relative to store dir
+         * </pre>
+         */
+        public java.lang.String getStoreFile(int index) {
+          return storeFile_.get(index);
+        }
+        /**
+         * <code>repeated string store_file = 3;</code>
+         *
+         * <pre>
+         * relative to store dir
+         * </pre>
+         */
+        public com.google.protobuf.ByteString
+            getStoreFileBytes(int index) {
+          return storeFile_.getByteString(index);
+        }
+        /**
+         * <code>repeated string store_file = 3;</code>
+         *
+         * <pre>
+         * relative to store dir
+         * </pre>
+         */
+        public Builder setStoreFile(
+            int index, java.lang.String value) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureStoreFileIsMutable();
+          storeFile_.set(index, value);
+          onChanged();
+          return this;
+        }
+        /**
+         * <code>repeated string store_file = 3;</code>
+         *
+         * <pre>
+         * relative to store dir
+         * </pre>
+         */
+        public Builder addStoreFile(
+            java.lang.String value) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureStoreFileIsMutable();
+          storeFile_.add(value);
+          onChanged();
+          return this;
+        }
+        /**
+         * <code>repeated string store_file = 3;</code>
+         *
+         * <pre>
+         * relative to store dir
+         * </pre>
+         */
+        public Builder addAllStoreFile(
+            java.lang.Iterable<java.lang.String> values) {
+          ensureStoreFileIsMutable();
+          super.addAll(values, storeFile_);
+          onChanged();
+          return this;
+        }
+        /**
+         * <code>repeated string store_file = 3;</code>
+         *
+         * <pre>
+         * relative to store dir
+         * </pre>
+         */
+        public Builder clearStoreFile() {
+          storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+          bitField0_ = (bitField0_ & ~0x00000004);
+          onChanged();
+          return this;
+        }
+        /**
+         * <code>repeated string store_file = 3;</code>
+         *
+         * <pre>
+         * relative to store dir
+         * </pre>
+         */
+        public Builder addStoreFileBytes(
+            com.google.protobuf.ByteString value) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureStoreFileIsMutable();
+          storeFile_.add(value);
+          onChanged();
+          return this;
+        }
+
+        // @@protoc_insertion_point(builder_scope:RegionEventDescriptor.StoreDescriptor)
+      }
+
+      static {
+        defaultInstance = new StoreDescriptor(true);
+        defaultInstance.initFields();
+      }
+
+      // @@protoc_insertion_point(class_scope:RegionEventDescriptor.StoreDescriptor)
+    }
+
+    private int bitField0_;
+    // required .RegionEventDescriptor.EventType event_type = 1;
+    public static final int EVENT_TYPE_FIELD_NUMBER = 1;
+    private org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType eventType_;
+    /**
+     * <code>required .RegionEventDescriptor.EventType event_type = 1;</code>
+     */
+    public boolean hasEventType() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required .RegionEventDescriptor.EventType event_type = 1;</code>
+     */
+    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType() {
+      return eventType_;
+    }
+
+    // required bytes table_name = 2;
+    public static final int TABLE_NAME_FIELD_NUMBER = 2;
+    private com.google.protobuf.ByteString tableName_;
+    /**
+     * <code>required bytes table_name = 2;</code>
+     */
+    public boolean hasTableName() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * <code>required bytes table_name = 2;</code>
+     */
+    public com.google.protobuf.ByteString getTableName() {
+      return tableName_;
+    }
+
+    // required bytes encoded_region_name = 3;
+    public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 3;
+    private com.google.protobuf.ByteString encodedRegionName_;
+    /**
+     * <code>required bytes encoded_region_name = 3;</code>
+     */
+    public boolean hasEncodedRegionName() {
+      return ((bitField0_ & 0x00000004) == 0x00000004);
+    }
+    /**
+     * <code>required bytes encoded_region_name = 3;</code>
+     */
+    public com.google.protobuf.ByteString getEncodedRegionName() {
+      return encodedRegionName_;
+    }
+
+    // optional uint64 log_sequence_number = 4;
+    public static final int LOG_SEQUENCE_NUMBER_FIELD_NUMBER = 4;
+    private long logSequenceNumber_;
+    /**
+     * <code>optional uint64 log_sequence_number = 4;</code>
+     */
+    public boolean hasLogSequenceNumber() {
+      return ((bitField0_ & 0x00000008) == 0x00000008);
+    }
+    /**
+     * <code>optional uint64 log_sequence_number = 4;</code>
+     */
+    public long getLogSequenceNumber() {
+      return logSequenceNumber_;
+    }
+
+    // repeated .RegionEventDescriptor.StoreDescriptor stores = 5;
+    public static final int STORES_FIELD_NUMBER = 5;
+    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor> stores_;
+    /**
+     * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+     */
+    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor> getStoresList() {
+      return stores_;
+    }
+    /**
+     * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+     */
+    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptorOrBuilder> 
+        getStoresOrBuilderList() {
+      return stores_;
+    }
+    /**
+     * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+     */
+    public int getStoresCount() {
+      return stores_.size();
+    }
+    /**
+     * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+     */
+    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor getStores(int index) {
+      return stores_.get(index);
+    }
+    /**
+     * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+     */
+    public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptorOrBuilder getStoresOrBuilder(
+        int index) {
+      return stores_.get(index);
+    }
+
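+    // Consumer sketch (illustrative; "event" stands for an already-parsed
+    // RegionEventDescriptor): walking the store list carried by a WAL entry.
+    //   for (StoreDescriptor store : event.getStoresList()) {
+    //     String home = store.getStoreHomeDir();                   // relative to region dir
+    //     java.util.List<String> files = store.getStoreFileList(); // relative to store dir
+    //   }
+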
+    // optional .ServerName server = 6;
+    public static final int SERVER_FIELD_NUMBER = 6;
+    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_;
+    /**
+     * <code>optional .ServerName server = 6;</code>
+     *
+     * <pre>
+     * Server who opened the region
+     * </pre>
+     */
+    public boolean hasServer() {
+      return ((bitField0_ & 0x00000010) == 0x00000010);
+    }
+    /**
+     * <code>optional .ServerName server = 6;</code>
+     *
+     * <pre>
+     * Server who opened the region
+     * </pre>
+     */
+    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() {
+      return server_;
+    }
+    /**
+     * <code>optional .ServerName server = 6;</code>
+     *
+     * <pre>
+     * Server who opened the region
+     * </pre>
+     */
+    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() {
+      return server_;
+    }
+
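+    // Hedged sketch: server is optional, so reads should be guarded with
+    // hasServer() before calling getServer():
+    //   if (event.hasServer()) {
+    //     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName sn = event.getServer();
+    //   }
+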
+    private void initFields() {
+      eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN;
+      tableName_ = com.google.protobuf.ByteString.EMPTY;
+      encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
+      logSequenceNumber_ = 0L;
+      stores_ = java.util.Collections.emptyList();
+      server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
+    }
+    private byte memoizedIsInitialized = -1;
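+    // Note: event_type, table_name and encoded_region_name are required
+    // fields, so a builder missing any of them fails isInitialized() and its
+    // build() throws UninitializedMessageException. Sketch (hypothetical values):
+    //   RegionEventDescriptor.newBuilder()
+    //       .setEventType(EventType.REGION_CLOSE)
+    //       .build(); // throws; table_name and encoded_region_name are unset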
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasEventType()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!hasTableName()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!hasEncodedRegionName()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      for (int i = 0; i < getStoresCount(); i++) {
+        if (!getStores(i).isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      if (hasServer()) {
+        if (!getServer().isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      // Computing the size first memoizes it (including nested message
+      // sizes), which the writeMessage() calls below rely on.
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeEnum(1, eventType_.getNumber());
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeBytes(2, tableName_);
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        output.writeBytes(3, encodedRegionName_);
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        output.writeUInt64(4, logSequenceNumber_);
+      }
+      for (int i = 0; i < stores_.size(); i++) {
+        output.writeMessage(5, stores_.get(i));
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        output.writeMessage(6, server_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeEnumSize(1, eventType_.getNumber());
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(2, tableName_);
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(3, encodedRegionName_);
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeUInt64Size(4, logSequenceNumber_);
+      }
+      for (int i = 0; i < stores_.size(); i++) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(5, stores_.get(i));
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(6, server_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor) obj;
+
+      boolean result = true;
+      result = result && (hasEventType() == other.hasEventType());
+      if (hasEventType()) {
+        result = result &&
+            (getEventType() == other.getEventType());
+      }
+      result = result && (hasTableName() == other.hasTableName());
+      if (hasTableName()) {
+        result = result && getTableName()
+            .equals(other.getTableName());
+      }
+      result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
+      if (hasEncodedRegionName()) {
+        result = result && getEncodedRegionName()
+            .equals(other.getEncodedRegionName());
+      }
+      result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber());
+      if (hasLogSequenceNumber()) {
+        result = result && (getLogSequenceNumber()
+            == other.getLogSequenceNumber());
+      }
+      result = result && getStoresList()
+          .equals(other.getStoresList());
+      result = result && (hasServer() == other.hasServer());
+      if (hasServer()) {
+        result = result && getServer()
+            .equals(other.getServer());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasEventType()) {
+        hash = (37 * hash) + EVENT_TYPE_FIELD_NUMBER;
+        hash = (53 * hash) + hashEnum(getEventType());
+      }
+      if (hasTableName()) {
+        hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
+        hash = (53 * hash) + getTableName().hashCode();
+      }
+      if (hasEncodedRegionName()) {
+        hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER;
+        hash = (53 * hash) + getEncodedRegionName().hashCode();
+      }
+      if (hasLogSequenceNumber()) {
+        hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER;
+        hash = (53 * hash) + hashLong(getLogSequenceNumber());
+      }
+      if (getStoresCount() > 0) {
+        hash = (37 * hash) + STORES_FIELD_NUMBER;
+        hash = (53 * hash) + getStoresList().hashCode();
+      }
+      if (hasServer()) {
+        hash = (37 * hash) + SERVER_FIELD_NUMBER;
+        hash = (53 * hash) + getServer().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
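+    /**
+     * Round-trip sketch (illustrative; the table, region and sequence values
+     * are hypothetical):
+     * <pre>
+     * RegionEventDescriptor event = RegionEventDescriptor.newBuilder()
+     *     .setEventType(RegionEventDescriptor.EventType.REGION_OPEN)
+     *     .setTableName(com.google.protobuf.ByteString.copyFromUtf8("t1"))
+     *     .setEncodedRegionName(com.google.protobuf.ByteString.copyFromUtf8("1588230740"))
+     *     .setLogSequenceNumber(42L)
+     *     .build();
+     * byte[] bytes = event.toByteArray();
+     * RegionEventDescriptor parsed = RegionEventDescriptor.parseFrom(bytes);
+     * </pre>
+     */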
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code RegionEventDescriptor}
+     *
+     * <pre>
+     * Special WAL entry to hold all information related to a region event (open/close).
+     * </pre>
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptorOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.class, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+          getStoresFieldBuilder();
+          getServerFieldBuilder();
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        tableName_ = com.google.protobuf.ByteString.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00000002);
+        encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00000004);
+        logSequenceNumber_ = 0L;
+        bitField0_ = (bitField0_ & ~0x00000008);
+        if (storesBuilder_ == null) {
+          stores_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000010);
+        } else {
+          storesBuilder_.clear();
+        }
+        if (serverBuilder_ == null) {
+          server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
+        } else {
+          serverBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000020);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_RegionEventDescriptor_descriptor;
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor build() {
+        org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.eventType_ = eventType_;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        result.tableName_ = tableName_;
+        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+          to_bitField0_ |= 0x00000004;
+        }
+        result.encodedRegionName_ = encodedRegionName_;
+        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+          to_bitField0_ |= 0x00000008;
+        }
+        result.logSequenceNumber_ = logSequenceNumber_;
+        if (storesBuilder_ == null) {
+          if (((bitField0_ & 0x00000010) == 0x00000010)) {
+            stores_ = java.util.Collections.unmodifiableList(stores_);
+            bitField0_ = (bitField0_ & ~0x00000010);
+          }
+          result.stores_ = stores_;
+        } else {
+          result.stores_ = storesBuilder_.build();
+        }
+        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
+          to_bitField0_ |= 0x00000010;
+        }
+        if (serverBuilder_ == null) {
+          result.server_ = server_;
+        } else {
+          result.server_ = serverBuilder_.build();
+        }
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.getDefaultInstance()) return this;
+        if (other.hasEventType()) {
+          setEventType(other.getEventType());
+        }
+        if (other.hasTableName()) {
+          setTableName(other.getTableName());
+        }
+        if (other.hasEncodedRegionName()) {
+          setEncodedRegionName(other.getEncodedRegionName());
+        }
+        if (other.hasLogSequenceNumber()) {
+          setLogSequenceNumber(other.getLogSequenceNumber());
+        }
+        if (storesBuilder_ == null) {
+          if (!other.stores_.isEmpty()) {
+            if (stores_.isEmpty()) {
+              stores_ = other.stores_;
+              bitField0_ = (bitField0_ & ~0x00000010);
+            } else {
+              ensureStoresIsMutable();
+              stores_.addAll(other.stores_);
+            }
+            onChanged();
+          }
+        } else {
+          if (!other.stores_.isEmpty()) {
+            if (storesBuilder_.isEmpty()) {
+              storesBuilder_.dispose();
+              storesBuilder_ = null;
+              stores_ = other.stores_;
+              bitField0_ = (bitField0_ & ~0x00000010);
+              storesBuilder_ = 
+                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+                   getStoresFieldBuilder() : null;
+            } else {
+              storesBuilder_.addAllMessages(other.stores_);
+            }
+          }
+        }
+        if (other.hasServer()) {
+          mergeServer(other.getServer());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasEventType()) {
+          return false;
+        }
+        if (!hasTableName()) {
+          return false;
+        }
+        if (!hasEncodedRegionName()) {
+          return false;
+        }
+        for (int i = 0; i < getStoresCount(); i++) {
+          if (!getStores(i).isInitialized()) {
+            return false;
+          }
+        }
+        if (hasServer()) {
+          if (!getServer().isInitialized()) {
+            return false;
+          }
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // required .RegionEventDescriptor.EventType event_type = 1;
+      private org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN;
+      /**
+       * <code>required .RegionEventDescriptor.EventType event_type = 1;</code>
+       */
+      public boolean hasEventType() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required .RegionEventDescriptor.EventType event_type = 1;</code>
+       */
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType getEventType() {
+        return eventType_;
+      }
+      /**
+       * <code>required .RegionEventDescriptor.EventType event_type = 1;</code>
+       */
+      public Builder setEventType(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000001;
+        eventType_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required .RegionEventDescriptor.EventType event_type = 1;</code>
+       */
+      public Builder clearEventType() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        eventType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType.REGION_OPEN;
+        onChanged();
+        return this;
+      }
+
+      // required bytes table_name = 2;
+      private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY;
+      /**
+       * <code>required bytes table_name = 2;</code>
+       */
+      public boolean hasTableName() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      /**
+       * <code>required bytes table_name = 2;</code>
+       */
+      public com.google.protobuf.ByteString getTableName() {
+        return tableName_;
+      }
+      /**
+       * <code>required bytes table_name = 2;</code>
+       */
+      public Builder setTableName(com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000002;
+        tableName_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required bytes table_name = 2;</code>
+       */
+      public Builder clearTableName() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        tableName_ = getDefaultInstance().getTableName();
+        onChanged();
+        return this;
+      }
+
+      // required bytes encoded_region_name = 3;
+      private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
+      /**
+       * <code>required bytes encoded_region_name = 3;</code>
+       */
+      public boolean hasEncodedRegionName() {
+        return ((bitField0_ & 0x00000004) == 0x00000004);
+      }
+      /**
+       * <code>required bytes encoded_region_name = 3;</code>
+       */
+      public com.google.protobuf.ByteString getEncodedRegionName() {
+        return encodedRegionName_;
+      }
+      /**
+       * <code>required bytes encoded_region_name = 3;</code>
+       */
+      public Builder setEncodedRegionName(com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00000004;
+        encodedRegionName_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required bytes encoded_region_name = 3;</code>
+       */
+      public Builder clearEncodedRegionName() {
+        bitField0_ = (bitField0_ & ~0x00000004);
+        encodedRegionName_ = getDefaultInstance().getEncodedRegionName();
+        onChanged();
+        return this;
+      }
+
+      // optional uint64 log_sequence_number = 4;
+      private long logSequenceNumber_ ;
+      /**
+       * <code>optional uint64 log_sequence_number = 4;</code>
+       */
+      public boolean hasLogSequenceNumber() {
+        return ((bitField0_ & 0x00000008) == 0x00000008);
+      }
+      /**
+       * <code>optional uint64 log_sequence_number = 4;</code>
+       */
+      public long getLogSequenceNumber() {
+        return logSequenceNumber_;
+      }
+      /**
+       * <code>optional uint64 log_sequence_number = 4;</code>
+       */
+      public Builder setLogSequenceNumber(long value) {
+        bitField0_ |= 0x00000008;
+        logSequenceNumber_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional uint64 log_sequence_number = 4;</code>
+       */
+      public Builder clearLogSequenceNumber() {
+        bitField0_ = (bitField0_ & ~0x00000008);
+        logSequenceNumber_ = 0L;
+        onChanged();
+        return this;
+      }
+
+      // repeated .RegionEventDescriptor.StoreDescriptor stores = 5;
+      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor> stores_ =
+        java.util.Collections.emptyList();
+      private void ensureStoresIsMutable() {
+        if (!((bitField0_ & 0x00000010) == 0x00000010)) {
+          stores_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor>(stores_);
+          bitField0_ |= 0x00000010;
+         }
+      }
+
+      private com.google.protobuf.RepeatedFieldBuilder<
+          org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptorOrBuilder> storesBuilder_;
+
+      /**
+       * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+       */
+      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor> getStoresList() {
+        if (storesBuilder_ == null) {
+          return java.util.Collections.unmodifiableList(stores_);
+        } else {
+          return storesBuilder_.getMessageList();
+        }
+      }
+      /**
+       * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+       */
+      public int getStoresCount() {
+        if (storesBuilder_ == null) {
+          return stores_.size();
+        } else {
+          return storesBuilder_.getCount();
+        }
+      }
+      /**
+       * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+       */
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor getStores(int index) {
+        if (storesBuilder_ == null) {
+          return stores_.get(index);
+        } else {
+          return storesBuilder_.getMessage(index);
+        }
+      }
+      /**
+       * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+       */
+      public Builder setStores(
+          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor value) {
+        if (storesBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureStoresIsMutable();
+          stores_.set(index, value);
+          onChanged();
+        } else {
+          storesBuilder_.setMessage(index, value);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+       */
+      public Builder setStores(
+          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder builderForValue) {
+        if (storesBuilder_ == null) {
+          ensureStoresIsMutable();
+          stores_.set(index, builderForValue.build());
+          onChanged();
+        } else {
+          storesBuilder_.setMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+       */
+      public Builder addStores(org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor value) {
+        if (storesBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureStoresIsMutable();
+          stores_.add(value);
+          onChanged();
+        } else {
+          storesBuilder_.addMessage(value);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+       */
+      public Builder addStores(
+          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor value) {
+        if (storesBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureStoresIsMutable();
+          stores_.add(index, value);
+          onChanged();
+        } else {
+          storesBuilder_.addMessage(index, value);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+       */
+      public Builder addStores(
+          org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder builderForValue) {
+        if (storesBuilder_ == null) {
+          ensureStoresIsMutable();
+          stores_.add(builderForValue.build());
+          onChanged();
+        } else {
+          storesBuilder_.addMessage(builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+       */
+      public Builder addStores(
+          int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder builderForValue) {
+        if (storesBuilder_ == null) {
+          ensureStoresIsMutable();
+          stores_.add(index, builderForValue.build());
+          onChanged();
+        } else {
+          storesBuilder_.addMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+       */
+      public Builder addAllStores(
+          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor> values) {
+        if (storesBuilder_ == null) {
+          ensureStoresIsMutable();
+          super.addAll(values, stores_);
+          onChanged();
+        } else {
+          storesBuilder_.addAllMessages(values);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+       */
+      public Builder clearStores() {
+        if (storesBuilder_ == null) {
+          stores_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000010);
+          onChanged();
+        } else {
+          storesBuilder_.clear();
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+       */
+      public Builder removeStores(int index) {
+        if (storesBuilder_ == null) {
+          ensureStoresIsMutable();
+          stores_.remove(index);
+          onChanged();
+        } else {
+          storesBuilder_.remove(index);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+       */
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder getStoresBuilder(
+          int index) {
+        return getStoresFieldBuilder().getBuilder(index);
+      }
+      /**
+       * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+       */
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptorOrBuilder getStoresOrBuilder(
+          int index) {
+        if (storesBuilder_ == null) {
+          return stores_.get(index);
+        } else {
+          return storesBuilder_.getMessageOrBuilder(index);
+        }
+      }
+      /**
+       * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+       */
+      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptorOrBuilder> 
+           getStoresOrBuilderList() {
+        if (storesBuilder_ != null) {
+          return storesBuilder_.getMessageOrBuilderList();
+        } else {
+          return java.util.Collections.unmodifiableList(stores_);
+        }
+      }
+      /**
+       * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+       */
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder addStoresBuilder() {
+        return getStoresFieldBuilder().addBuilder(
+            org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.getDefaultInstance());
+      }
+      /**
+       * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+       */
+      public org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder addStoresBuilder(
+          int index) {
+        return getStoresFieldBuilder().addBuilder(
+            index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.getDefaultInstance());
+      }
+      /**
+       * <code>repeated .RegionEventDescriptor.StoreDescriptor stores = 5;</code>
+       */
+      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder> 
+           getStoresBuilderList() {
+        return getStoresFieldBuilder().getBuilderList();
+      }
+      private com.google.protobuf.RepeatedFieldBuilder<
+          org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptorOrBuilder> 
+          getStoresFieldBuilder() {
+        if (storesBuilder_ == null) {
+          storesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+              org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptorOrBuilder>(
+                  stores_,
+                  ((bitField0_ & 0x00000010) == 0x00000010),
+                  getParentForChildren(),
+                  isClean());
+          stores_ = null;
+        }
+        return storesBuilder_;
+      }
+
+      // optional .ServerName server = 6;
+      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
+      private com.google.protobuf.SingleFieldBuilder<
+          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_;
+      /**
+       * <code>optional .ServerName server = 6;</code>
+       *
+       * <pre>
+       * Server who opened the region
+       * </pre>
+       */
+      public boolean hasServer() {
+        return ((bitField0_ & 0x00000020) == 0x00000020);
+      }
+      /**
+       * <code>optional .ServerName server = 6;</code>
+       *
+       * <pre>
+       * Server who opened the region
+       * </pre>
+       */
+      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() {
+        if (serverBuilder_ == null) {
+          return server_;
+        } else {
+          return serverBuilder_.getMessage();
+        }
+      }
+      /**
+       * <code>optional .ServerName server = 6;</code>
+       *
+       * <pre>
+       * Server who opened the region
+       * </pre>
+       */
+      public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
+        if (serverBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          server_ = value;
+          onChanged();
+        } else {
+          serverBuilder_.setMessage(value);
+        }
+        bitField0_ |= 0x00000020;
+        return this;
+      }
+      /**
+       * <code>optional .ServerName server = 6;</code>
+       *
+       * <pre>
+       * Server who opened the region
+       * </pre>
+       */
+      public Builder setServer(
+          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
+        if (serverBuilder_ == null) {
+          server_ = builderForValue.build();
+          onChanged();
+        } else {
+          serverBuilder_.setMessage(builderForValue.build());
+        }
+        bitField0_ |= 0x00000020;
+        return this;
+      }
+      /**
+       * <code>optional .ServerName server = 6;</code>
+       *
+       * <pre>
+       * Server who opened the region
+       * </pre>
+       */
+      public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
+        if (serverBuilder_ == null) {
+          if (((bitField0_ & 0x00000020) == 0x00000020) &&
+              server_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) {
+            server_ =
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial();
+          } else {
+            server_ = value;
+          }
+          onChanged();
+        } else {
+          serverBuilder_.mergeFrom(value);
+        }
+        bitField0_ |= 0x00000020;
+        return this;
+      }
+      /**
+       * <code>optional .ServerName server = 6;</code>
+       *
+       * <pre>
+       * Server who opened the region
+       * </pre>
+       */
+      public Builder clearServer() {
+        if (serverBuilder_ == null) {
+          server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
+          onChanged();
+        } else {
+          serverBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00000020);
+        return this;
+      }

<TRUNCATED>
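
For orientation, here is a minimal usage sketch of the generated builder API
above. It is illustrative only and not part of this commit; the class name
RegionEventExample and every literal value ("t1", "abc123", "f1",
"hfile-0001", 42L, the host and port) are made up.

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.StoreDescriptor;

public final class RegionEventExample {
  // Builds a REGION_OPEN descriptor, exercising the repeated `stores`
  // field and the optional `server` field shown in the diff above.
  public static RegionEventDescriptor regionOpenEvent() {
    return RegionEventDescriptor.newBuilder()
        .setEventType(EventType.REGION_OPEN)
        .setTableName(ByteString.copyFromUtf8("t1"))              // required
        .setEncodedRegionName(ByteString.copyFromUtf8("abc123"))  // required
        .setLogSequenceNumber(42L)                                // optional
        .addStores(StoreDescriptor.newBuilder()                   // repeated
            .setFamilyName(ByteString.copyFromUtf8("f1"))
            .setStoreHomeDir("f1")              // relative to the region dir
            .addStoreFile("hfile-0001"))        // relative to the store dir
        .setServer(HBaseProtos.ServerName.newBuilder()            // optional
            .setHostName("rs1.example.com")
            .setPort(60020))
        .build();
  }
}

Since event_type, table_name and encoded_region_name are declared required,
build() throws UninitializedMessageException if any of them is unset; the
optional fields may be omitted.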

[3/3] git commit: HBASE-11512 Write region open/close events to WAL

Posted by en...@apache.org.
HBASE-11512 Write region open/close events to WAL


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d44e7df5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d44e7df5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d44e7df5

Branch: refs/heads/master
Commit: d44e7df5dc1e701fae4b611b9858dfd80499ee35
Parents: aeecd20
Author: Enis Soztutar <en...@apache.org>
Authored: Tue Aug 19 18:45:21 2014 -0700
Committer: Enis Soztutar <en...@apache.org>
Committed: Tue Aug 19 18:45:21 2014 -0700

----------------------------------------------------------------------
 .../hadoop/hbase/protobuf/ProtobufUtil.java     |   26 +
 .../hbase/protobuf/generated/WALProtos.java     | 2455 +++++++++++++++++-
 hbase-protocol/src/main/protobuf/WAL.proto      |   23 +
 .../hadoop/hbase/regionserver/HRegion.java      |   52 +
 .../hadoop/hbase/regionserver/wal/HLogUtil.java |   17 +
 .../hadoop/hbase/regionserver/wal/WALEdit.java  |   16 +
 .../master/TestDistributedLogSplitting.java     |   38 +-
 .../hadoop/hbase/regionserver/TestHRegion.java  |  136 +
 .../hbase/regionserver/wal/TestWALReplay.java   |   21 +-
 9 files changed, 2755 insertions(+), 29 deletions(-)
----------------------------------------------------------------------
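
These descriptors reach the WAL as serialized protobuf bytes, so they
round-trip through the standard generated API. A short fragment
(illustrative, not from the commit; it reuses the hypothetical
RegionEventExample sketch from earlier, and parseFrom declares the checked
InvalidProtocolBufferException):

RegionEventDescriptor event = RegionEventExample.regionOpenEvent();
byte[] payload = event.toByteArray();
// parseFrom throws com.google.protobuf.InvalidProtocolBufferException
// on corrupt input.
RegionEventDescriptor parsed = RegionEventDescriptor.parseFrom(payload);
assert parsed.getEventType() == RegionEventDescriptor.EventType.REGION_OPEN;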


http://git-wip-us.apache.org/repos/asf/hbase/blob/d44e7df5/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 4033fb5..86fe515 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -120,6 +120,8 @@ import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.Regio
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction;
+import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor;
+import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType;
 import org.apache.hadoop.hbase.security.access.Permission;
 import org.apache.hadoop.hbase.security.access.TablePermission;
 import org.apache.hadoop.hbase.security.access.UserPermission;
@@ -2529,6 +2531,30 @@ public final class ProtobufUtil {
     return desc.build();
   }
 
+  public static RegionEventDescriptor toRegionEventDescriptor(
+      EventType eventType, HRegionInfo hri, long seqId, ServerName server,
+      Map<byte[], List<Path>> storeFiles) {
+    RegionEventDescriptor.Builder desc = RegionEventDescriptor.newBuilder()
+        .setEventType(eventType)
+        .setTableName(ByteStringer.wrap(hri.getTable().getName()))
+        .setEncodedRegionName(ByteStringer.wrap(hri.getEncodedNameAsBytes()))
+        .setLogSequenceNumber(seqId)
+        .setServer(toServerName(server));
+
+    for (Map.Entry<byte[], List<Path>> entry : storeFiles.entrySet()) {
+      RegionEventDescriptor.StoreDescriptor.Builder builder
+        = RegionEventDescriptor.StoreDescriptor.newBuilder()
+          .setFamilyName(ByteStringer.wrap(entry.getKey()))
+          .setStoreHomeDir(Bytes.toString(entry.getKey()));
+      for (Path path : entry.getValue()) {
+        builder.addStoreFile(path.getName());
+      }
+
+      desc.addStores(builder);
+    }
+    return desc.build();
+  }
+
   /**
    * Return short version of Message toString'd, shorter than TextFormat#shortDebugString.
    * Tries to NOT print out data both because it can be big but also so we do not have data in our
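
A sketch of how a region server might invoke the new helper above
(illustrative, not from the commit; hri, openSeqId and serverName are
hypothetical variables assumed to be in scope):

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor;
import org.apache.hadoop.hbase.util.Bytes;

// Family name -> store files for that family.
Map<byte[], List<Path>> storeFiles =
    new TreeMap<byte[], List<Path>>(Bytes.BYTES_COMPARATOR);
storeFiles.put(Bytes.toBytes("f1"),
    Arrays.asList(new Path("f1/hfile-0001")));   // hypothetical file

RegionEventDescriptor desc = ProtobufUtil.toRegionEventDescriptor(
    RegionEventDescriptor.EventType.REGION_OPEN,
    hri,          // HRegionInfo of the region being opened
    openSeqId,    // sequence id assigned at region open
    serverName,   // org.apache.hadoop.hbase.ServerName of this server
    storeFiles);

Note that the helper derives store_home_dir from the family name and records
only Path#getName() for each store file, so the resulting WAL entry carries
relative names rather than absolute filesystem paths.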