Posted to commits@hbase.apache.org by st...@apache.org on 2017/09/08 17:35:51 UTC

[2/5] hbase git commit: HBASE-18106 Redo ProcedureInfo and LockInfo

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java
index f19195e..84fc7e5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java
@@ -19,8 +19,6 @@
 package org.apache.hadoop.hbase.master.procedure;
 
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
 import java.util.List;
 
 import org.apache.commons.logging.Log;
@@ -33,6 +31,7 @@ import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyState;
@@ -166,8 +165,9 @@ public class AddColumnFamilyProcedure
   }
 
   @Override
-  public void serializeStateData(final OutputStream stream) throws IOException {
-    super.serializeStateData(stream);
+  protected void serializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.serializeStateData(serializer);
 
     MasterProcedureProtos.AddColumnFamilyStateData.Builder addCFMsg =
         MasterProcedureProtos.AddColumnFamilyStateData.newBuilder()
@@ -179,15 +179,16 @@ public class AddColumnFamilyProcedure
           .setUnmodifiedTableSchema(ProtobufUtil.toTableSchema(unmodifiedTableDescriptor));
     }
 
-    addCFMsg.build().writeDelimitedTo(stream);
+    serializer.serialize(addCFMsg.build());
   }
 
   @Override
-  public void deserializeStateData(final InputStream stream) throws IOException {
-    super.deserializeStateData(stream);
+  protected void deserializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.deserializeStateData(serializer);
 
     MasterProcedureProtos.AddColumnFamilyStateData addCFMsg =
-        MasterProcedureProtos.AddColumnFamilyStateData.parseDelimitedFrom(stream);
+        serializer.deserialize(MasterProcedureProtos.AddColumnFamilyStateData.class);
     setUser(MasterProcedureUtil.toUserInfo(addCFMsg.getUserInfo()));
     tableName = ProtobufUtil.toTableName(addCFMsg.getTableName());
     cfDescriptor = ProtobufUtil.toColumnFamilyDescriptor(addCFMsg.getColumnfamilySchema());

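Every hunk in this patch repeats the same migration: serializeStateData/deserializeStateData stop taking raw streams and instead receive a ProcedureStateSerializer, so the delimited-protobuf wire format becomes an implementation detail behind the interface. Below is a minimal sketch of one stream-backed implementation, assuming the two-method interface shape inferred from the call sites above (serialize(Message), deserialize(Class)); it is an illustration, not the implementation shipped with this patch.

  import java.io.IOException;
  import java.io.InputStream;
  import java.io.OutputStream;
  import java.lang.reflect.Method;

  import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
  import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;

  public class DelimitedStateSerializer implements ProcedureStateSerializer {
    private final OutputStream out;
    private final InputStream in;

    public DelimitedStateSerializer(OutputStream out, InputStream in) {
      this.out = out;
      this.in = in;
    }

    @Override
    public void serialize(Message message) throws IOException {
      // Equivalent of the removed "msg.build().writeDelimitedTo(stream)" calls.
      message.writeDelimitedTo(out);
    }

    @Override
    @SuppressWarnings("unchecked")
    public <M extends Message> M deserialize(Class<M> clazz) throws IOException {
      // Equivalent of the removed "X.parseDelimitedFrom(stream)" calls; the
      // generated static parser is the only per-class entry point a Class
      // token gives us, so it is reached reflectively here.
      try {
        Method parser = clazz.getMethod("parseDelimitedFrom", InputStream.class);
        return (M) parser.invoke(null, in);
      } catch (ReflectiveOperationException e) {
        throw new IOException("cannot parse " + clazz.getName(), e);
      }
    }
  }

With a serializer like this behind the interface, each procedure's state round-trips exactly as before, while the executor stays free to swap in a different encoding without touching the procedures themselves.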
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
index cc39f53..7e21ad6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
@@ -19,8 +19,6 @@
 package org.apache.hadoop.hbase.master.procedure;
 
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -47,6 +45,7 @@ import org.apache.hadoop.hbase.master.MetricsSnapshot;
 import org.apache.hadoop.hbase.master.procedure.CreateTableProcedure.CreateHdfsRegions;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.monitoring.TaskMonitor;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -244,8 +243,9 @@ public class CloneSnapshotProcedure
   }
 
   @Override
-  public void serializeStateData(final OutputStream stream) throws IOException {
-    super.serializeStateData(stream);
+  protected void serializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.serializeStateData(serializer);
 
     MasterProcedureProtos.CloneSnapshotStateData.Builder cloneSnapshotMsg =
       MasterProcedureProtos.CloneSnapshotStateData.newBuilder()
@@ -271,15 +271,16 @@ public class CloneSnapshotProcedure
         cloneSnapshotMsg.addParentToChildRegionsPairList(parentToChildrenPair);
       }
     }
-    cloneSnapshotMsg.build().writeDelimitedTo(stream);
+    serializer.serialize(cloneSnapshotMsg.build());
   }
 
   @Override
-  public void deserializeStateData(final InputStream stream) throws IOException {
-    super.deserializeStateData(stream);
+  protected void deserializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.deserializeStateData(serializer);
 
     MasterProcedureProtos.CloneSnapshotStateData cloneSnapshotMsg =
-      MasterProcedureProtos.CloneSnapshotStateData.parseDelimitedFrom(stream);
+        serializer.deserialize(MasterProcedureProtos.CloneSnapshotStateData.class);
     setUser(MasterProcedureUtil.toUserInfo(cloneSnapshotMsg.getUserInfo()));
     snapshot = cloneSnapshotMsg.getSnapshot();
     tableDescriptor = ProtobufUtil.toTableDescriptor(cloneSnapshotMsg.getTableSchema());

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java
index 7d65126..2cbf8f2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java
@@ -19,9 +19,6 @@
 package org.apache.hadoop.hbase.master.procedure;
 
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
@@ -29,6 +26,7 @@ import org.apache.hadoop.hbase.NamespaceExistException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.master.MasterFileSystem;
 import org.apache.hadoop.hbase.master.TableNamespaceManager;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceState;
@@ -135,21 +133,23 @@ public class CreateNamespaceProcedure
   }
 
   @Override
-  public void serializeStateData(final OutputStream stream) throws IOException {
-    super.serializeStateData(stream);
+  protected void serializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.serializeStateData(serializer);
 
     MasterProcedureProtos.CreateNamespaceStateData.Builder createNamespaceMsg =
         MasterProcedureProtos.CreateNamespaceStateData.newBuilder().setNamespaceDescriptor(
           ProtobufUtil.toProtoNamespaceDescriptor(this.nsDescriptor));
-    createNamespaceMsg.build().writeDelimitedTo(stream);
+    serializer.serialize(createNamespaceMsg.build());
   }
 
   @Override
-  public void deserializeStateData(final InputStream stream) throws IOException {
-    super.deserializeStateData(stream);
+  protected void deserializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.deserializeStateData(serializer);
 
     MasterProcedureProtos.CreateNamespaceStateData createNamespaceMsg =
-        MasterProcedureProtos.CreateNamespaceStateData.parseDelimitedFrom(stream);
+        serializer.deserialize(MasterProcedureProtos.CreateNamespaceStateData.class);
     nsDescriptor = ProtobufUtil.toNamespaceDescriptor(createNamespaceMsg.getNamespaceDescriptor());
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
index 14604fd..a5c1584 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
@@ -19,8 +19,6 @@
 package org.apache.hadoop.hbase.master.procedure;
 
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -39,6 +37,7 @@ import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableState;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
 import org.apache.hadoop.hbase.master.MasterFileSystem;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -183,8 +182,9 @@ public class CreateTableProcedure
   }
 
   @Override
-  public void serializeStateData(final OutputStream stream) throws IOException {
-    super.serializeStateData(stream);
+  protected void serializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.serializeStateData(serializer);
 
     MasterProcedureProtos.CreateTableStateData.Builder state =
       MasterProcedureProtos.CreateTableStateData.newBuilder()
@@ -195,15 +195,16 @@ public class CreateTableProcedure
         state.addRegionInfo(HRegionInfo.convert(hri));
       }
     }
-    state.build().writeDelimitedTo(stream);
+    serializer.serialize(state.build());
   }
 
   @Override
-  public void deserializeStateData(final InputStream stream) throws IOException {
-    super.deserializeStateData(stream);
+  protected void deserializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.deserializeStateData(serializer);
 
     MasterProcedureProtos.CreateTableStateData state =
-      MasterProcedureProtos.CreateTableStateData.parseDelimitedFrom(stream);
+        serializer.deserialize(MasterProcedureProtos.CreateTableStateData.class);
     setUser(MasterProcedureUtil.toUserInfo(state.getUserInfo()));
     tableDescriptor = ProtobufUtil.toTableDescriptor(state.getTableSchema());
     if (state.getRegionInfoCount() == 0) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java
index 9ec814a..12726ed 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java
@@ -19,8 +19,6 @@
 package org.apache.hadoop.hbase.master.procedure;
 
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
 import java.util.List;
 
 import org.apache.commons.logging.Log;
@@ -32,6 +30,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -172,8 +171,9 @@ public class DeleteColumnFamilyProcedure
   }
 
   @Override
-  public void serializeStateData(final OutputStream stream) throws IOException {
-    super.serializeStateData(stream);
+  protected void serializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.serializeStateData(serializer);
 
     MasterProcedureProtos.DeleteColumnFamilyStateData.Builder deleteCFMsg =
         MasterProcedureProtos.DeleteColumnFamilyStateData.newBuilder()
@@ -185,14 +185,15 @@ public class DeleteColumnFamilyProcedure
           .setUnmodifiedTableSchema(ProtobufUtil.toTableSchema(unmodifiedTableDescriptor));
     }
 
-    deleteCFMsg.build().writeDelimitedTo(stream);
+    serializer.serialize(deleteCFMsg.build());
   }
 
   @Override
-  public void deserializeStateData(final InputStream stream) throws IOException {
-    super.deserializeStateData(stream);
+  protected void deserializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.deserializeStateData(serializer);
     MasterProcedureProtos.DeleteColumnFamilyStateData deleteCFMsg =
-        MasterProcedureProtos.DeleteColumnFamilyStateData.parseDelimitedFrom(stream);
+        serializer.deserialize(MasterProcedureProtos.DeleteColumnFamilyStateData.class);
     setUser(MasterProcedureUtil.toUserInfo(deleteCFMsg.getUserInfo()));
     tableName = ProtobufUtil.toTableName(deleteCFMsg.getTableName());
     familyName = deleteCFMsg.getColumnfamilyName().toByteArray();

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java
index d91a6e1..a66e77c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java
@@ -20,9 +20,6 @@ package org.apache.hadoop.hbase.master.procedure;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
@@ -35,6 +32,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.constraint.ConstraintException;
 import org.apache.hadoop.hbase.master.MasterFileSystem;
 import org.apache.hadoop.hbase.master.TableNamespaceManager;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceState;
@@ -147,8 +145,9 @@ public class DeleteNamespaceProcedure
   }
 
   @Override
-  public void serializeStateData(final OutputStream stream) throws IOException {
-    super.serializeStateData(stream);
+  protected void serializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.serializeStateData(serializer);
 
     MasterProcedureProtos.DeleteNamespaceStateData.Builder deleteNamespaceMsg =
         MasterProcedureProtos.DeleteNamespaceStateData.newBuilder().setNamespaceName(namespaceName);
@@ -156,15 +155,16 @@ public class DeleteNamespaceProcedure
       deleteNamespaceMsg.setNamespaceDescriptor(
         ProtobufUtil.toProtoNamespaceDescriptor(this.nsDescriptor));
     }
-    deleteNamespaceMsg.build().writeDelimitedTo(stream);
+    serializer.serialize(deleteNamespaceMsg.build());
   }
 
   @Override
-  public void deserializeStateData(final InputStream stream) throws IOException {
-    super.deserializeStateData(stream);
+  protected void deserializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.deserializeStateData(serializer);
 
     MasterProcedureProtos.DeleteNamespaceStateData deleteNamespaceMsg =
-        MasterProcedureProtos.DeleteNamespaceStateData.parseDelimitedFrom(stream);
+        serializer.deserialize(MasterProcedureProtos.DeleteNamespaceStateData.class);
     namespaceName = deleteNamespaceMsg.getNamespaceName();
     if (deleteNamespaceMsg.hasNamespaceDescriptor()) {
       nsDescriptor =

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java
index 04dfc60..0f665f2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java
@@ -19,8 +19,6 @@
 package org.apache.hadoop.hbase.master.procedure;
 
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -48,6 +46,7 @@ import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
 import org.apache.hadoop.hbase.master.MasterFileSystem;
 import org.apache.hadoop.hbase.mob.MobConstants;
 import org.apache.hadoop.hbase.mob.MobUtils;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -203,8 +202,9 @@ public class DeleteTableProcedure
   }
 
   @Override
-  public void serializeStateData(final OutputStream stream) throws IOException {
-    super.serializeStateData(stream);
+  protected void serializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.serializeStateData(serializer);
 
     MasterProcedureProtos.DeleteTableStateData.Builder state =
       MasterProcedureProtos.DeleteTableStateData.newBuilder()
@@ -215,15 +215,16 @@ public class DeleteTableProcedure
         state.addRegionInfo(HRegionInfo.convert(hri));
       }
     }
-    state.build().writeDelimitedTo(stream);
+    serializer.serialize(state.build());
   }
 
   @Override
-  public void deserializeStateData(final InputStream stream) throws IOException {
-    super.deserializeStateData(stream);
+  protected void deserializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.deserializeStateData(serializer);
 
     MasterProcedureProtos.DeleteTableStateData state =
-      MasterProcedureProtos.DeleteTableStateData.parseDelimitedFrom(stream);
+        serializer.deserialize(MasterProcedureProtos.DeleteTableStateData.class);
     setUser(MasterProcedureUtil.toUserInfo(state.getUserInfo()));
     tableName = ProtobufUtil.toTableName(state.getTableName());
     if (state.getRegionInfoCount() == 0) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java
index 0872a01..20eca94 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java
@@ -19,9 +19,6 @@
 package org.apache.hadoop.hbase.master.procedure;
 
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.MetaTableAccessor;
@@ -33,6 +30,7 @@ import org.apache.hadoop.hbase.client.TableState;
 import org.apache.hadoop.hbase.constraint.ConstraintException;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
 import org.apache.hadoop.hbase.master.TableStateManager;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableState;
@@ -171,8 +169,9 @@ public class DisableTableProcedure
   }
 
   @Override
-  public void serializeStateData(final OutputStream stream) throws IOException {
-    super.serializeStateData(stream);
+  protected void serializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.serializeStateData(serializer);
 
     MasterProcedureProtos.DisableTableStateData.Builder disableTableMsg =
         MasterProcedureProtos.DisableTableStateData.newBuilder()
@@ -180,15 +179,16 @@ public class DisableTableProcedure
             .setTableName(ProtobufUtil.toProtoTableName(tableName))
             .setSkipTableStateCheck(skipTableStateCheck);
 
-    disableTableMsg.build().writeDelimitedTo(stream);
+    serializer.serialize(disableTableMsg.build());
   }
 
   @Override
-  public void deserializeStateData(final InputStream stream) throws IOException {
-    super.deserializeStateData(stream);
+  protected void deserializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.deserializeStateData(serializer);
 
     MasterProcedureProtos.DisableTableStateData disableTableMsg =
-        MasterProcedureProtos.DisableTableStateData.parseDelimitedFrom(stream);
+        serializer.deserialize(MasterProcedureProtos.DisableTableStateData.class);
     setUser(MasterProcedureUtil.toUserInfo(disableTableMsg.getUserInfo()));
     tableName = ProtobufUtil.toTableName(disableTableMsg.getTableName());
     skipTableStateCheck = disableTableMsg.getSkipTableStateCheck();

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java
index 4f4b5b1..ff43d27 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java
@@ -19,9 +19,6 @@
 package org.apache.hadoop.hbase.master.procedure;
 
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.MetaTableAccessor;
@@ -32,6 +29,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.TableState;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
 import org.apache.hadoop.hbase.master.TableStateManager;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableState;
@@ -170,8 +168,9 @@ public class EnableTableProcedure
   }
 
   @Override
-  public void serializeStateData(final OutputStream stream) throws IOException {
-    super.serializeStateData(stream);
+  protected void serializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.serializeStateData(serializer);
 
     MasterProcedureProtos.EnableTableStateData.Builder enableTableMsg =
         MasterProcedureProtos.EnableTableStateData.newBuilder()
@@ -179,15 +178,16 @@ public class EnableTableProcedure
             .setTableName(ProtobufUtil.toProtoTableName(tableName))
             .setSkipTableStateCheck(skipTableStateCheck);
 
-    enableTableMsg.build().writeDelimitedTo(stream);
+    serializer.serialize(enableTableMsg.build());
   }
 
   @Override
-  public void deserializeStateData(final InputStream stream) throws IOException {
-    super.deserializeStateData(stream);
+  protected void deserializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.deserializeStateData(serializer);
 
     MasterProcedureProtos.EnableTableStateData enableTableMsg =
-        MasterProcedureProtos.EnableTableStateData.parseDelimitedFrom(stream);
+        serializer.deserialize(MasterProcedureProtos.EnableTableStateData.class);
     setUser(MasterProcedureUtil.toUserInfo(enableTableMsg.getUserInfo()));
     tableName = ProtobufUtil.toTableName(enableTableMsg.getTableName());
     skipTableStateCheck = enableTableMsg.getSkipTableStateCheck();

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
index 564b86d..e14c037 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.hbase.master.procedure;
 
-import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -32,7 +30,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.ProcedureInfo;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableExistsException;
 import org.apache.hadoop.hbase.TableName;
@@ -42,11 +39,13 @@ import org.apache.hadoop.hbase.master.locking.LockProcedure;
 import org.apache.hadoop.hbase.master.procedure.TableProcedureInterface.TableOperationType;
 import org.apache.hadoop.hbase.procedure2.AbstractProcedureScheduler;
 import org.apache.hadoop.hbase.procedure2.LockAndQueue;
-import org.apache.hadoop.hbase.procedure2.LockInfo;
 import org.apache.hadoop.hbase.procedure2.LockStatus;
+import org.apache.hadoop.hbase.procedure2.LockType;
+import org.apache.hadoop.hbase.procedure2.LockedResource;
+import org.apache.hadoop.hbase.procedure2.LockedResourceType;
 import org.apache.hadoop.hbase.procedure2.Procedure;
 import org.apache.hadoop.hbase.procedure2.ProcedureDeque;
-import org.apache.hadoop.hbase.procedure2.ProcedureUtil;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.util.AvlUtil.AvlIterableList;
 import org.apache.hadoop.hbase.util.AvlUtil.AvlKeyComparator;
 import org.apache.hadoop.hbase.util.AvlUtil.AvlLinkedNode;
@@ -238,57 +237,42 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
     return pollResult;
   }
 
-  private LockInfo createLockInfo(LockInfo.ResourceType resourceType,
+  private LockedResource createLockedResource(LockedResourceType resourceType,
       String resourceName, LockAndQueue queue) {
-    LockInfo info = new LockInfo();
-
-    info.setResourceType(resourceType);
-    info.setResourceName(resourceName);
+    LockType lockType;
+    Procedure<?> exclusiveLockOwnerProcedure;
+    int sharedLockCount;
 
     if (queue.hasExclusiveLock()) {
-      info.setLockType(LockInfo.LockType.EXCLUSIVE);
-
-      Procedure<?> exclusiveLockOwnerProcedure = queue.getExclusiveLockOwnerProcedure();
-      ProcedureInfo exclusiveLockOwnerProcedureInfo =
-          ProcedureUtil.convertToProcedureInfo(exclusiveLockOwnerProcedure);
-      info.setExclusiveLockOwnerProcedure(exclusiveLockOwnerProcedureInfo);
-    } else if (queue.getSharedLockCount() > 0) {
-      info.setLockType(LockInfo.LockType.SHARED);
-      info.setSharedLockCount(queue.getSharedLockCount());
+      lockType = LockType.EXCLUSIVE;
+      exclusiveLockOwnerProcedure = queue.getExclusiveLockOwnerProcedure();
+      sharedLockCount = 0;
+    } else {
+      lockType = LockType.SHARED;
+      exclusiveLockOwnerProcedure = null;
+      sharedLockCount = queue.getSharedLockCount();
     }
 
+    List<Procedure<?>> waitingProcedures = new ArrayList<>();
+
     for (Procedure<?> procedure : queue) {
       if (!(procedure instanceof LockProcedure)) {
         continue;
       }
 
-      LockProcedure lockProcedure = (LockProcedure)procedure;
-      LockInfo.WaitingProcedure waitingProcedure = new LockInfo.WaitingProcedure();
-
-      switch (lockProcedure.getType()) {
-      case EXCLUSIVE:
-        waitingProcedure.setLockType(LockInfo.LockType.EXCLUSIVE);
-        break;
-      case SHARED:
-        waitingProcedure.setLockType(LockInfo.LockType.SHARED);
-        break;
-      }
-
-      ProcedureInfo procedureInfo = ProcedureUtil.convertToProcedureInfo(lockProcedure);
-      waitingProcedure.setProcedure(procedureInfo);
-
-      info.addWaitingProcedure(waitingProcedure);
+      waitingProcedures.add(procedure);
     }
 
-    return info;
+    return new LockedResource(resourceType, resourceName, lockType,
+        exclusiveLockOwnerProcedure, sharedLockCount, waitingProcedures);
   }
 
   @Override
-  public List<LockInfo> listLocks() {
+  public List<LockedResource> getLocks() {
     schedLock();
 
     try {
-      List<LockInfo> lockInfos = new ArrayList<>();
+      List<LockedResource> lockedResources = new ArrayList<>();
 
       for (Entry<ServerName, LockAndQueue> entry : locking.serverLocks
           .entrySet()) {
@@ -296,9 +280,9 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
         LockAndQueue queue = entry.getValue();
 
         if (queue.isLocked()) {
-          LockInfo lockInfo = createLockInfo(LockInfo.ResourceType.SERVER,
-              serverName, queue);
-          lockInfos.add(lockInfo);
+          LockedResource lockedResource =
+            createLockedResource(LockedResourceType.SERVER, serverName, queue);
+          lockedResources.add(lockedResource);
         }
       }
 
@@ -308,9 +292,9 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
         LockAndQueue queue = entry.getValue();
 
         if (queue.isLocked()) {
-          LockInfo lockInfo = createLockInfo(LockInfo.ResourceType.NAMESPACE,
-              namespaceName, queue);
-          lockInfos.add(lockInfo);
+          LockedResource lockedResource =
+            createLockedResource(LockedResourceType.NAMESPACE, namespaceName, queue);
+          lockedResources.add(lockedResource);
         }
       }
 
@@ -320,9 +304,9 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
         LockAndQueue queue = entry.getValue();
 
         if (queue.isLocked()) {
-          LockInfo lockInfo = createLockInfo(LockInfo.ResourceType.TABLE,
-              tableName, queue);
-          lockInfos.add(lockInfo);
+          LockedResource lockedResource =
+            createLockedResource(LockedResourceType.TABLE, tableName, queue);
+          lockedResources.add(lockedResource);
         }
       }
 
@@ -331,20 +315,21 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
         LockAndQueue queue = entry.getValue();
 
         if (queue.isLocked()) {
-          LockInfo lockInfo = createLockInfo(LockInfo.ResourceType.REGION,
-              regionName, queue);
-          lockInfos.add(lockInfo);
+          LockedResource lockedResource =
+            createLockedResource(LockedResourceType.REGION, regionName, queue);
+          lockedResources.add(lockedResource);
         }
       }
 
-      return lockInfos;
+      return lockedResources;
     } finally {
       schedUnlock();
     }
   }
 
   @Override
-  public LockInfo getLockInfoForResource(LockInfo.ResourceType resourceType, String resourceName) {
+  public LockedResource getLockResource(LockedResourceType resourceType,
+      String resourceName) {
     LockAndQueue queue = null;
     schedLock();
     try {
@@ -363,7 +348,7 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
           break;
       }
 
-      return queue != null ? createLockInfo(resourceType, resourceName, queue) : null;
+      return queue != null ? createLockedResource(resourceType, resourceName, queue) : null;
     } finally {
       schedUnlock();
     }
@@ -624,17 +609,17 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
   /**
    * Get lock info for a resource of specified type and name and log details
    */
-  protected void logLockInfoForResource(LockInfo.ResourceType resourceType, String resourceName) {
+  protected void logLockedResource(LockedResourceType resourceType, String resourceName) {
     if (!LOG.isDebugEnabled()) {
       return;
     }
 
-    LockInfo lockInfo = getLockInfoForResource(resourceType, resourceName);
-    if (lockInfo != null) {
+    LockedResource lockedResource = getLockResource(resourceType, resourceName);
+    if (lockedResource != null) {
       String msg = resourceType.toString() + " '" + resourceName + "', shared lock count=" +
-          lockInfo.getSharedLockCount();
+          lockedResource.getSharedLockCount();
 
-      ProcedureInfo proc = lockInfo.getExclusiveLockOwnerProcedure();
+      Procedure<?> proc = lockedResource.getExclusiveLockOwnerProcedure();
       if (proc != null) {
         msg += ", exclusively locked by procId=" + proc.getProcId();
       }
@@ -657,13 +642,13 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
       final LockAndQueue tableLock = locking.getTableLock(table);
       if (!namespaceLock.trySharedLock()) {
         waitProcedure(namespaceLock, procedure);
-        logLockInfoForResource(LockInfo.ResourceType.NAMESPACE, namespace);
+        logLockedResource(LockedResourceType.NAMESPACE, namespace);
         return true;
       }
       if (!tableLock.tryExclusiveLock(procedure)) {
         namespaceLock.releaseSharedLock();
         waitProcedure(tableLock, procedure);
-        logLockInfoForResource(LockInfo.ResourceType.TABLE, table.getNameAsString());
+        logLockedResource(LockedResourceType.TABLE, table.getNameAsString());
         return true;
       }
       removeFromRunQueue(tableRunQueue, getTableQueue(table));
@@ -920,7 +905,7 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
           locking.getTableLock(TableName.NAMESPACE_TABLE_NAME);
       if (!systemNamespaceTableLock.trySharedLock()) {
         waitProcedure(systemNamespaceTableLock, procedure);
-        logLockInfoForResource(LockInfo.ResourceType.TABLE,
+        logLockedResource(LockedResourceType.TABLE,
             TableName.NAMESPACE_TABLE_NAME.getNameAsString());
         return true;
       }
@@ -929,7 +914,7 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
       if (!namespaceLock.tryExclusiveLock(procedure)) {
         systemNamespaceTableLock.releaseSharedLock();
         waitProcedure(namespaceLock, procedure);
-        logLockInfoForResource(LockInfo.ResourceType.NAMESPACE, namespace);
+        logLockedResource(LockedResourceType.NAMESPACE, namespace);
         return true;
       }
       return false;
@@ -982,7 +967,7 @@ public class MasterProcedureScheduler extends AbstractProcedureScheduler {
         return false;
       }
       waitProcedure(lock, procedure);
-      logLockInfoForResource(LockInfo.ResourceType.SERVER, serverName.getServerName());
+      logLockedResource(LockedResourceType.SERVER, serverName.getServerName());
       return true;
     } finally {
       schedUnlock();

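The scheduler now reports locks as plain LockedResource values holding live Procedure references, instead of LockInfo beans built from converted ProcedureInfo. A hedged sketch of how a caller might walk the new getLocks() result follows; the getter names mirror the constructor arguments in createLockedResource() above, and any getters not visible in this hunk are assumptions.

  import java.util.List;

  import org.apache.hadoop.hbase.procedure2.LockedResource;
  import org.apache.hadoop.hbase.procedure2.Procedure;

  public final class LockReport {
    private LockReport() {
    }

    public static void print(List<LockedResource> lockedResources) {
      for (LockedResource resource : lockedResources) {
        StringBuilder line = new StringBuilder()
            .append(resource.getResourceType())
            .append(" '").append(resource.getResourceName()).append("' ")
            .append(resource.getLockType());
        // Exactly one of the two branches carries data, matching
        // createLockedResource() above: exclusive locks record an owner
        // procedure, shared locks record only a count.
        Procedure<?> owner = resource.getExclusiveLockOwnerProcedure();
        if (owner != null) {
          line.append(", owner procId=").append(owner.getProcId());
        } else {
          line.append(", shared count=").append(resource.getSharedLockCount());
        }
        System.out.println(line);
      }
    }
  }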
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyColumnFamilyProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyColumnFamilyProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyColumnFamilyProcedure.java
index ac86dab..8fc9d28 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyColumnFamilyProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyColumnFamilyProcedure.java
@@ -19,9 +19,6 @@
 package org.apache.hadoop.hbase.master.procedure;
 
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.InvalidFamilyOperationException;
@@ -31,6 +28,7 @@ import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyState;
@@ -159,8 +157,9 @@ public class ModifyColumnFamilyProcedure
   }
 
   @Override
-  public void serializeStateData(final OutputStream stream) throws IOException {
-    super.serializeStateData(stream);
+  protected void serializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.serializeStateData(serializer);
 
     MasterProcedureProtos.ModifyColumnFamilyStateData.Builder modifyCFMsg =
         MasterProcedureProtos.ModifyColumnFamilyStateData.newBuilder()
@@ -172,15 +171,16 @@ public class ModifyColumnFamilyProcedure
           .setUnmodifiedTableSchema(ProtobufUtil.toTableSchema(unmodifiedtableDescriptor));
     }
 
-    modifyCFMsg.build().writeDelimitedTo(stream);
+    serializer.serialize(modifyCFMsg.build());
   }
 
   @Override
-  public void deserializeStateData(final InputStream stream) throws IOException {
-    super.deserializeStateData(stream);
+  protected void deserializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.deserializeStateData(serializer);
 
     MasterProcedureProtos.ModifyColumnFamilyStateData modifyCFMsg =
-        MasterProcedureProtos.ModifyColumnFamilyStateData.parseDelimitedFrom(stream);
+        serializer.deserialize(MasterProcedureProtos.ModifyColumnFamilyStateData.class);
     setUser(MasterProcedureUtil.toUserInfo(modifyCFMsg.getUserInfo()));
     tableName = ProtobufUtil.toTableName(modifyCFMsg.getTableName());
     cfDescriptor = ProtobufUtil.toColumnFamilyDescriptor(modifyCFMsg.getColumnfamilySchema());

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java
index 17e7197..99c18fb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java
@@ -19,15 +19,13 @@
 package org.apache.hadoop.hbase.master.procedure;
 
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.NamespaceNotFoundException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.master.TableNamespaceManager;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceState;
@@ -130,8 +128,9 @@ public class ModifyNamespaceProcedure
   }
 
   @Override
-  public void serializeStateData(final OutputStream stream) throws IOException {
-    super.serializeStateData(stream);
+  protected void serializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.serializeStateData(serializer);
 
     MasterProcedureProtos.ModifyNamespaceStateData.Builder modifyNamespaceMsg =
         MasterProcedureProtos.ModifyNamespaceStateData.newBuilder().setNamespaceDescriptor(
@@ -140,15 +139,16 @@ public class ModifyNamespaceProcedure
       modifyNamespaceMsg.setUnmodifiedNamespaceDescriptor(
         ProtobufUtil.toProtoNamespaceDescriptor(this.oldNsDescriptor));
     }
-    modifyNamespaceMsg.build().writeDelimitedTo(stream);
+    serializer.serialize(modifyNamespaceMsg.build());
   }
 
   @Override
-  public void deserializeStateData(final InputStream stream) throws IOException {
-    super.deserializeStateData(stream);
+  protected void deserializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.deserializeStateData(serializer);
 
     MasterProcedureProtos.ModifyNamespaceStateData modifyNamespaceMsg =
-        MasterProcedureProtos.ModifyNamespaceStateData.parseDelimitedFrom(stream);
+        serializer.deserialize(MasterProcedureProtos.ModifyNamespaceStateData.class);
     newNsDescriptor =
         ProtobufUtil.toNamespaceDescriptor(modifyNamespaceMsg.getNamespaceDescriptor());
     if (modifyNamespaceMsg.hasUnmodifiedNamespaceDescriptor()) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
index 9741faa..0fc08c6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
@@ -19,8 +19,6 @@
 package org.apache.hadoop.hbase.master.procedure;
 
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -42,6 +40,7 @@ import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableState;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableState;
@@ -185,8 +184,9 @@ public class ModifyTableProcedure
   }
 
   @Override
-  public void serializeStateData(final OutputStream stream) throws IOException {
-    super.serializeStateData(stream);
+  protected void serializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.serializeStateData(serializer);
 
     MasterProcedureProtos.ModifyTableStateData.Builder modifyTableMsg =
         MasterProcedureProtos.ModifyTableStateData.newBuilder()
@@ -199,15 +199,16 @@ public class ModifyTableProcedure
           .setUnmodifiedTableSchema(ProtobufUtil.toTableSchema(unmodifiedTableDescriptor));
     }
 
-    modifyTableMsg.build().writeDelimitedTo(stream);
+    serializer.serialize(modifyTableMsg.build());
   }
 
   @Override
-  public void deserializeStateData(final InputStream stream) throws IOException {
-    super.deserializeStateData(stream);
+  protected void deserializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.deserializeStateData(serializer);
 
     MasterProcedureProtos.ModifyTableStateData modifyTableMsg =
-        MasterProcedureProtos.ModifyTableStateData.parseDelimitedFrom(stream);
+        serializer.deserialize(MasterProcedureProtos.ModifyTableStateData.class);
     setUser(MasterProcedureUtil.toUserInfo(modifyTableMsg.getUserInfo()));
     modifiedTableDescriptor = ProtobufUtil.toTableDescriptor(modifyTableMsg.getModifiedTableSchema());
     deleteColumnFamilyInModify = modifyTableMsg.getDeleteColumnFamilyInModify();

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureDescriber.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureDescriber.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureDescriber.java
new file mode 100644
index 0000000..080d6ff
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureDescriber.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.master.procedure;
+
+import java.io.IOException;
+import java.util.Date;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.procedure2.Procedure;
+import org.apache.hadoop.hbase.procedure2.ProcedureUtil;
+import org.apache.hadoop.hbase.protobuf.ProtobufMessageConverter;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
+import org.apache.hadoop.hbase.util.JRubyFormat;
+
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class ProcedureDescriber {
+  private ProcedureDescriber() {
+  }
+
+  private static Object parametersToObject(Procedure<?> proc) {
+    try {
+      ProcedureProtos.Procedure protoProc = ProcedureUtil.convertToProtoProcedure(proc);
+      List<Object> parameters = protoProc.getStateMessageList().stream()
+        .map((any) -> {
+          try {
+            return ProtobufMessageConverter.toJavaObject(any);
+          } catch (InvalidProtocolBufferException e) {
+            return e.toString();
+          }
+        }).collect(Collectors.toList());
+      return parameters;
+    } catch (IOException e) {
+      return e.toString();
+    }
+  }
+
+  public static String describe(Procedure<?> proc) {
+    Map<String, Object> description = new LinkedHashMap<>();
+
+    description.put("ID", proc.getProcId());
+    description.put("PARENT_ID", proc.getParentProcId());
+    description.put("STATE", proc.getState());
+    description.put("OWNER", proc.getOwner());
+    description.put("TYPE", proc.getProcName());
+    description.put("START_TIME", new Date(proc.getSubmittedTime()));
+    description.put("LAST_UPDATE", new Date(proc.getLastUpdate()));
+
+    if (proc.isFailed()) {
+      description.put("ERRORS", proc.getException().unwrapRemoteIOException().getMessage());
+    }
+    description.put("PARAMETERS", parametersToObject(proc));
+
+    return JRubyFormat.print(description);
+  }
+
+  public static String describeParameters(Procedure<?> proc) {
+    Object object = parametersToObject(proc);
+    return JRubyFormat.print(object);
+  }
+}

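The new ProcedureDescriber above renders a procedure as a JRuby-literal map for shell output. A hedged usage sketch follows; the executor lookup shown here is an assumption for illustration and is not part of this patch.

  import org.apache.hadoop.hbase.master.procedure.ProcedureDescriber;
  import org.apache.hadoop.hbase.procedure2.Procedure;
  import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;

  public final class DescribeExample {
    private DescribeExample() {
    }

    // Prints the JRuby-literal description of one procedure, if the
    // executor still knows about it.
    static void describeOne(ProcedureExecutor<?> executor, long procId) {
      Procedure<?> proc = executor.getProcedure(procId);
      if (proc != null) {
        System.out.println(ProcedureDescriber.describe(proc));
      }
    }
  }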
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java
index 72f0648..7ae81ba 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.master.assignment.AssignProcedure;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
 import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
 import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
 import org.apache.hadoop.hbase.procedure2.StateMachineProcedure;
@@ -38,7 +39,6 @@ import org.apache.zookeeper.KeeperException;
 
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.OutputStream;
 import java.util.Set;
 
 /**
@@ -81,7 +81,7 @@ public class RecoverMetaProcedure
 
   /**
    * This constructor is also used when deserializing from a procedure store; we'll construct one
-   * of these then call {@link #deserializeStateData(InputStream)}. Do not use directly.
+   * of these then call #deserializeStateData(InputStream). Do not use directly.
    */
   public RecoverMetaProcedure() {
     this(null, false);
@@ -183,22 +183,24 @@ public class RecoverMetaProcedure
   }
 
   @Override
-  protected void serializeStateData(OutputStream stream) throws IOException {
-    super.serializeStateData(stream);
+  protected void serializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.serializeStateData(serializer);
     MasterProcedureProtos.RecoverMetaStateData.Builder state =
         MasterProcedureProtos.RecoverMetaStateData.newBuilder().setShouldSplitWal(shouldSplitWal);
     if (failedMetaServer != null) {
       state.setFailedMetaServer(ProtobufUtil.toServerName(failedMetaServer));
     }
     state.setReplicaId(replicaId);
-    state.build().writeDelimitedTo(stream);
+    serializer.serialize(state.build());
   }
 
   @Override
-  protected void deserializeStateData(InputStream stream) throws IOException {
-    super.deserializeStateData(stream);
+  protected void deserializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.deserializeStateData(serializer);
     MasterProcedureProtos.RecoverMetaStateData state =
-        MasterProcedureProtos.RecoverMetaStateData.parseDelimitedFrom(stream);
+        serializer.deserialize(MasterProcedureProtos.RecoverMetaStateData.class);
     this.shouldSplitWal = state.hasShouldSplitWal() && state.getShouldSplitWal();
     this.failedMetaServer = state.hasFailedMetaServer() ?
         ProtobufUtil.toServerName(state.getFailedMetaServer()) : null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java
index 4930396..754bb1d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java
@@ -19,8 +19,6 @@
 package org.apache.hadoop.hbase.master.procedure;
 
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -45,6 +43,7 @@ import org.apache.hadoop.hbase.master.MasterFileSystem;
 import org.apache.hadoop.hbase.master.MetricsSnapshot;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.monitoring.TaskMonitor;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -229,8 +228,9 @@ public class RestoreSnapshotProcedure
   }
 
   @Override
-  public void serializeStateData(final OutputStream stream) throws IOException {
-    super.serializeStateData(stream);
+  protected void serializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.serializeStateData(serializer);
 
     MasterProcedureProtos.RestoreSnapshotStateData.Builder restoreSnapshotMsg =
       MasterProcedureProtos.RestoreSnapshotStateData.newBuilder()
@@ -267,15 +267,16 @@ public class RestoreSnapshotProcedure
         restoreSnapshotMsg.addParentToChildRegionsPairList (parentToChildrenPair);
       }
     }
-    restoreSnapshotMsg.build().writeDelimitedTo(stream);
+    serializer.serialize(restoreSnapshotMsg.build());
   }
 
   @Override
-  public void deserializeStateData(final InputStream stream) throws IOException {
-    super.deserializeStateData(stream);
+  protected void deserializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.deserializeStateData(serializer);
 
     MasterProcedureProtos.RestoreSnapshotStateData restoreSnapshotMsg =
-      MasterProcedureProtos.RestoreSnapshotStateData.parseDelimitedFrom(stream);
+        serializer.deserialize(MasterProcedureProtos.RestoreSnapshotStateData.class);
     setUser(MasterProcedureUtil.toUserInfo(restoreSnapshotMsg.getUserInfo()));
     snapshot = restoreSnapshotMsg.getSnapshot();
     modifiedTableDescriptor =

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
index c7af53f..9307c45 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.master.procedure;
 
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
@@ -36,6 +35,7 @@ import org.apache.hadoop.hbase.master.assignment.AssignProcedure;
 import org.apache.hadoop.hbase.master.assignment.AssignmentManager;
 import org.apache.hadoop.hbase.master.assignment.RegionTransitionProcedure;
 import org.apache.hadoop.hbase.procedure2.ProcedureMetrics;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
 import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
 import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
 import org.apache.hadoop.hbase.procedure2.StateMachineProcedure;
@@ -96,7 +96,7 @@ implements ServerProcedureInterface {
 
   /**
    * Used when deserializing from a procedure store; we'll construct one of these then call
-   * {@link #deserializeStateData(InputStream)}. Do not use directly.
+   * #deserializeStateData(ProcedureStateSerializer). Do not use directly.
    */
   public ServerCrashProcedure() {
     super();
@@ -285,8 +285,9 @@ implements ServerProcedureInterface {
   }
 
   @Override
-  public void serializeStateData(final OutputStream stream) throws IOException {
-    super.serializeStateData(stream);
+  protected void serializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.serializeStateData(serializer);
 
     MasterProcedureProtos.ServerCrashStateData.Builder state =
       MasterProcedureProtos.ServerCrashStateData.newBuilder().
@@ -298,15 +299,16 @@ implements ServerProcedureInterface {
         state.addRegionsOnCrashedServer(HRegionInfo.convert(hri));
       }
     }
-    state.build().writeDelimitedTo(stream);
+    serializer.serialize(state.build());
   }
 
   @Override
-  public void deserializeStateData(final InputStream stream) throws IOException {
-    super.deserializeStateData(stream);
+  protected void deserializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.deserializeStateData(serializer);
 
     MasterProcedureProtos.ServerCrashStateData state =
-      MasterProcedureProtos.ServerCrashStateData.parseDelimitedFrom(stream);
+        serializer.deserialize(MasterProcedureProtos.ServerCrashStateData.class);
     this.serverName = ProtobufUtil.toServerName(state.getServerName());
     this.carryingMeta = state.hasCarryingMeta() ? state.getCarryingMeta() : false;
     // shouldSplitWAL has a default over in pb so this invocation will always work.

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java
index 506c67d..8f21298 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java
@@ -19,8 +19,6 @@
 package org.apache.hadoop.hbase.master.procedure;
 
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -35,6 +33,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.exceptions.HBaseException;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
@@ -209,8 +208,9 @@ public class TruncateTableProcedure
   }
 
   @Override
-  public void serializeStateData(final OutputStream stream) throws IOException {
-    super.serializeStateData(stream);
+  protected void serializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.serializeStateData(serializer);
 
     MasterProcedureProtos.TruncateTableStateData.Builder state =
       MasterProcedureProtos.TruncateTableStateData.newBuilder()
@@ -226,15 +226,16 @@ public class TruncateTableProcedure
         state.addRegionInfo(HRegionInfo.convert(hri));
       }
     }
-    state.build().writeDelimitedTo(stream);
+    serializer.serialize(state.build());
   }
 
   @Override
-  public void deserializeStateData(final InputStream stream) throws IOException {
-    super.deserializeStateData(stream);
+  protected void deserializeStateData(ProcedureStateSerializer serializer)
+      throws IOException {
+    super.deserializeStateData(serializer);
 
     MasterProcedureProtos.TruncateTableStateData state =
-      MasterProcedureProtos.TruncateTableStateData.parseDelimitedFrom(stream);
+        serializer.deserialize(MasterProcedureProtos.TruncateTableStateData.class);
     setUser(MasterProcedureUtil.toUserInfo(state.getUserInfo()));
     if (state.hasTableSchema()) {
       tableDescriptor = ProtobufUtil.toTableDescriptor(state.getTableSchema());

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
index 979a351..81bcc0c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
@@ -27,14 +27,14 @@ import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.MetaTableAccessor;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.errorhandling.ForeignException;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
@@ -45,9 +45,9 @@ import org.apache.hadoop.hbase.master.MasterServices;
 import org.apache.hadoop.hbase.master.MetricsSnapshot;
 import org.apache.hadoop.hbase.master.SnapshotSentinel;
 import org.apache.hadoop.hbase.master.locking.LockManager;
-import org.apache.hadoop.hbase.master.locking.LockProcedure;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.monitoring.TaskMonitor;
+import org.apache.hadoop.hbase.procedure2.LockType;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
@@ -114,7 +114,7 @@ public abstract class TakeSnapshotHandler extends EventHandler implements Snapsh
     this.snapshotManifest = SnapshotManifest.create(conf, fs, workingDir, snapshot, monitor);
 
     this.tableLock = master.getLockManager().createMasterLock(
-        snapshotTable, LockProcedure.LockType.EXCLUSIVE,
+        snapshotTable, LockType.EXCLUSIVE,
         this.getClass().getName() + ": take snapshot " + snapshot.getName());
 
     // prepare the verify
@@ -134,6 +134,7 @@ public abstract class TakeSnapshotHandler extends EventHandler implements Snapsh
     return htd;
   }
 
+  @Override
   public TakeSnapshotHandler prepare() throws Exception {
     super.prepare();
     // after this, you should ensure to release this lock in case of exceptions
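
LockType is promoted in this patch from a nested enum on LockProcedure to a top-level enum in org.apache.hadoop.hbase.procedure2, so handlers, coprocessors, and tests can name a lock mode without depending on the procedure implementation. A toy before/after illustration (class names shortened; EXCLUSIVE and SHARED are the two real constants, as the locks page below confirms):

// Before: the only way to name a lock mode was through the procedure class.
class LockProcedureSketch {
  enum LockType { EXCLUSIVE, SHARED }
}

// After: a free-standing enum, importable on its own, e.g. by
// TakeSnapshotHandler above when it requests an EXCLUSIVE table lock.
enum LockType { EXCLUSIVE, SHARED }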

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index 1e63b13..2229bd2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -40,7 +40,6 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
-import org.apache.hadoop.hbase.ProcedureInfo;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Tag;
@@ -78,9 +77,10 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
 import org.apache.hadoop.hbase.ipc.RpcServer;
 import org.apache.hadoop.hbase.master.MasterServices;
 import org.apache.hadoop.hbase.master.locking.LockProcedure;
-import org.apache.hadoop.hbase.master.locking.LockProcedure.LockType;
 import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
 import org.apache.hadoop.hbase.net.Address;
+import org.apache.hadoop.hbase.procedure2.LockType;
+import org.apache.hadoop.hbase.procedure2.Procedure;
 import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
@@ -1208,31 +1208,32 @@ public class AccessController implements MasterObserver, RegionObserver, RegionS
   }
 
   @Override
-  public void preListProcedures(ObserverContext<MasterCoprocessorEnvironment> ctx)
+  public void preGetProcedures(ObserverContext<MasterCoprocessorEnvironment> ctx)
       throws IOException {
-    // We are delegating the authorization check to postListProcedures as we don't have
+    // We are delegating the authorization check to postGetProcedures as we don't have
     // any concrete set of procedures to work with
   }
 
   @Override
-  public void postListProcedures(
+  public void postGetProcedures(
       ObserverContext<MasterCoprocessorEnvironment> ctx,
-      List<ProcedureInfo> procInfoList) throws IOException {
-    if (procInfoList.isEmpty()) {
+      List<Procedure<?>> procList) throws IOException {
+    if (procList.isEmpty()) {
       return;
     }
 
     // Retains only those which passes authorization checks, as the checks weren't done as part
-    // of preListProcedures.
-    Iterator<ProcedureInfo> itr = procInfoList.iterator();
+    // of preGetProcedures.
+    Iterator<Procedure<?>> itr = procList.iterator();
     User user = getActiveUser(ctx);
     while (itr.hasNext()) {
-      ProcedureInfo procInfo = itr.next();
+      Procedure<?> proc = itr.next();
       try {
-        if (!ProcedureInfo.isProcedureOwner(procInfo, user)) {
+        String owner = proc.getOwner();
+        if (owner == null || !owner.equals(user.getShortName())) {
           // If the user is not the procedure owner, then we should further probe whether
           // he can see the procedure.
-          requirePermission(user, "listProcedures", Action.ADMIN);
+          requirePermission(user, "getProcedures", Action.ADMIN);
         }
       } catch (AccessDeniedException e) {
         itr.remove();
@@ -1241,6 +1242,13 @@ public class AccessController implements MasterObserver, RegionObserver, RegionS
   }
 
   @Override
+  public void preGetLocks(ObserverContext<MasterCoprocessorEnvironment> ctx)
+      throws IOException {
+    User user = getActiveUser(ctx);
+    requirePermission(user, "getLocks", Action.ADMIN);
+  }
+
+  @Override
   public void preMove(ObserverContext<MasterCoprocessorEnvironment> c, HRegionInfo region,
       ServerName srcServer, ServerName destServer) throws IOException {
     requirePermission(getActiveUser(c), "move", region.getTable(), null, null, Action.ADMIN);
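
With ProcedureInfo gone, the observer above filters live Procedure<?> instances and decides visibility by comparing owner names, falling back to an ADMIN permission check for everything else. A self-contained sketch of that rule, with a record standing in for the HBase Procedure type and a boolean standing in for the requirePermission outcome:

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class OwnerFilterSketch {
  // Stand-in for org.apache.hadoop.hbase.procedure2.Procedure.
  record Proc(long procId, String owner) {}

  // Keep procedures the user owns; anything else is visible only with ADMIN
  // (mirroring the requirePermission(user, "getProcedures", ADMIN) call).
  static List<Proc> filterForUser(List<Proc> procs, String shortName,
      boolean hasAdmin) {
    List<Proc> visible = new ArrayList<>(procs);
    Iterator<Proc> itr = visible.iterator();
    while (itr.hasNext()) {
      String owner = itr.next().owner();
      if ((owner == null || !owner.equals(shortName)) && !hasAdmin) {
        itr.remove();
      }
    }
    return visible;
  }

  public static void main(String[] args) {
    List<Proc> procs = List.of(new Proc(1, "alice"), new Proc(2, "bob"));
    System.out.println(filterForUser(procs, "alice", false)); // alice's only
    System.out.println(filterForUser(procs, "alice", true));  // everything
  }
}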

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp b/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
index ce62fa1..63a41cc 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
@@ -29,14 +29,16 @@
   import="org.apache.hadoop.hbase.HBaseConfiguration"
   import="org.apache.hadoop.hbase.master.HMaster"
   import="org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv"
-  import="org.apache.hadoop.hbase.ProcedureInfo"
-  import="org.apache.hadoop.hbase.procedure2.LockInfo"
+  import="org.apache.hadoop.hbase.master.procedure.ProcedureDescriber"
+  import="org.apache.hadoop.hbase.procedure2.LockedResource"
   import="org.apache.hadoop.hbase.procedure2.Procedure"
   import="org.apache.hadoop.hbase.procedure2.ProcedureExecutor"
+  import="org.apache.hadoop.hbase.procedure2.ProcedureUtil"
   import="org.apache.hadoop.hbase.procedure2.store.wal.ProcedureWALFile"
   import="org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore"
   import="org.apache.hadoop.hbase.procedure2.util.StringUtils"
-
+  import="org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos"
+  import="org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil"
 %>
 <%
   HMaster master = (HMaster)getServletContext().getAttribute(HMaster.MASTER);
@@ -48,7 +50,7 @@
   long millisFromLastRoll = walStore.getMillisFromLastRoll();
   ArrayList<ProcedureWALFile> procedureWALFiles = walStore.getActiveLogs();
   Set<ProcedureWALFile> corruptedWALFiles = walStore.getCorruptedLogs();
-  List<Procedure> procedures = procExecutor.listProcedures();
+  List<Procedure<?>> procedures = procExecutor.getProcedures();
   Collections.sort(procedures, new Comparator<Procedure>() {
     @Override
     public int compare(Procedure lhs, Procedure rhs) {
@@ -58,7 +60,7 @@
     }
   });
 
-  List<LockInfo> locks = master.listLocks();
+  List<LockedResource> lockedResources = master.getLocks();
 %>
 <!DOCTYPE html>
 <?xml version="1.0" encoding="UTF-8" ?>
@@ -118,17 +120,19 @@
         <th>Start Time</th>
         <th>Last Update</th>
         <th>Errors</th>
+        <th>Parameters</th>
     </tr>
     <% for (Procedure<?> proc : procedures) { %>
       <tr>
         <td><%= proc.getProcId() %></td>
         <td><%= proc.hasParent() ? proc.getParentProcId() : "" %></td>
         <td><%= escapeXml(proc.getState().toString()) %></td>
-        <td><%= escapeXml(proc.getOwner()) %></td>
+        <td><%= proc.hasOwner() ? escapeXml(proc.getOwner()) : "" %></td>
         <td><%= escapeXml(proc.getProcName()) %></td>
         <td><%= new Date(proc.getSubmittedTime()) %></td>
         <td><%= new Date(proc.getLastUpdate()) %></td>
         <td><%= escapeXml(proc.isFailed() ? proc.getException().unwrapRemoteIOException().getMessage() : "") %></td>
+        <td><%= escapeXml(ProcedureDescriber.describeParameters(proc)) %></td>
       </tr>
     <% } %>
   </table>
@@ -244,40 +248,35 @@
           <h1>Locks</h1>
       </div>
   </div>
-  <% for (LockInfo lock : locks) { %>
-    <h2><%= lock.getResourceType() %>: <%= lock.getResourceName() %></h2>
+  <% for (LockedResource lockedResource : lockedResources) { %>
+    <h2><%= lockedResource.getResourceType() %>: <%= lockedResource.getResourceName() %></h2>
     <%
-      switch (lock.getLockType()) {
+      switch (lockedResource.getLockType()) {
       case EXCLUSIVE:
     %>
     <p>Lock type: EXCLUSIVE</p>
-    <p>Owner procedure ID: <%= lock.getExclusiveLockOwnerProcedure().getProcId() %></p>
+    <p>Owner procedure: <%= escapeXml(ProcedureDescriber.describe(lockedResource.getExclusiveLockOwnerProcedure())) %></p>
     <%
         break;
       case SHARED:
     %>
     <p>Lock type: SHARED</p>
-    <p>Number of shared locks: <%= lock.getSharedLockCount() %></p>
+    <p>Number of shared locks: <%= lockedResource.getSharedLockCount() %></p>
     <%
         break;
       }
 
-      List<LockInfo.WaitingProcedure> waitingProcedures = lock.getWaitingProcedures();
+      List<Procedure<?>> waitingProcedures = lockedResource.getWaitingProcedures();
 
       if (!waitingProcedures.isEmpty()) {
     %>
 	    <h3>Waiting procedures</h3>
 	    <table class="table table-striped" width="90%" >
-		    <tr>
-		      <th>Lock type</th>
-		      <th>Procedure ID</th>
-		    </tr>
-		    <% for (LockInfo.WaitingProcedure waitingProcedure : waitingProcedures) { %>
-		      <tr>
-	          <td><%= waitingProcedure.getLockType() %></td>
-	          <td><%= waitingProcedure.getProcedure().getProcId() %></td>
-		      </tr>
-		    <% } %>
+        <% for (Procedure<?> proc : waitingProcedures) { %>
+          <tr>
+            <td><%= escapeXml(ProcedureDescriber.describe(proc)) %></td>
+          </tr>
+        <% } %>
 	    </table>
     <% } %>
   <% } %>
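
ProcedureDescriber.describe and describeParameters are new helpers whose bodies are not part of this hunk; the imports added at the top of the page (ProcedureUtil, ProcedureProtos, ProtobufUtil) suggest they render a procedure's protobuf form as text. One plausible shape for such a helper, sketched with stock protobuf-java-util rather than HBase's shaded classes, so the method below is an assumption, not the committed implementation:

import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import com.google.protobuf.util.JsonFormat;

final class DescriberSketch {
  private DescriberSketch() {}

  // Hypothetical: render any protobuf state message as compact JSON, which
  // the JSP then runs through escapeXml before printing.
  static String describeParameters(Message state)
      throws InvalidProtocolBufferException {
    return JsonFormat.printer()
        .omittingInsignificantWhitespace()
        .print(state);
  }
}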

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
index 7bdda80..cfc4911 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
@@ -44,8 +44,6 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
-import org.apache.hadoop.hbase.NotServingRegionException;
-import org.apache.hadoop.hbase.ProcedureInfo;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableExistsException;
 import org.apache.hadoop.hbase.TableName;
@@ -683,9 +681,15 @@ public class TestAdmin2 {
   }
 
   @Test(timeout = 300000)
-  public void testListProcedures() throws Exception {
-    ProcedureInfo[] procList = admin.listProcedures();
-    assertTrue(procList.length >= 0);
+  public void testGetProcedures() throws Exception {
+    String procList = admin.getProcedures();
+    assertTrue(procList.startsWith("["));
+  }
+
+  @Test(timeout = 300000)
+  public void testGetLocks() throws Exception {
+    String lockList = admin.getLocks();
+    assertTrue(lockList.startsWith("["));
   }
 
   /*
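
The rewritten tests encode the client-facing shift: Admin.getProcedures() and Admin.getLocks() now return a String holding a JSON array rather than typed ProcedureInfo/LockInfo results, which is why both assertions only check for a leading '['. A usage sketch against the standard client entry points (requires a reachable cluster; the JSON shape inside the array is not specified by this hunk):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class GetProceduresSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Admin admin = conn.getAdmin()) {
      String procedures = admin.getProcedures();  // JSON array as a String
      String locks = admin.getLocks();            // likewise
      System.out.println(procedures.startsWith("[") + " " + locks.startsWith("["));
    }
  }
}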

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.java
index b7fac06..9ef26d3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.java
@@ -18,9 +18,16 @@
 
 package org.apache.hadoop.hbase.client;
 
+import static org.apache.hadoop.hbase.client.AsyncProcess.START_LOG_ERRORS_AFTER_COUNT_KEY;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Random;
+
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.ProcedureInfo;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
 import org.apache.hadoop.hbase.procedure.ProcedureManagerHost;
 import org.apache.hadoop.hbase.procedure.SimpleMasterProcedureManager;
@@ -35,16 +42,6 @@ import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-
-import static org.apache.hadoop.hbase.client.AsyncProcess.START_LOG_ERRORS_AFTER_COUNT_KEY;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
 /**
  * Class to test asynchronous procedure admin operations.
  */
@@ -98,8 +95,8 @@ public class TestAsyncProcedureAdminApi extends TestAsyncAdminBase {
 
   @Test
   public void listProcedure() throws Exception {
-    List<ProcedureInfo> procList = admin.listProcedures().get();
-    assertTrue(procList.size() >= 0);
+    String procList = admin.getProcedures().get();
+    assertTrue(procList.startsWith("["));
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
index 1b8b27b..49ab418 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
@@ -42,7 +42,6 @@ import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
-import org.apache.hadoop.hbase.ProcedureInfo;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
@@ -60,7 +59,9 @@ import org.apache.hadoop.hbase.master.RegionPlan;
 import org.apache.hadoop.hbase.master.locking.LockProcedure;
 import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
 import org.apache.hadoop.hbase.net.Address;
-import org.apache.hadoop.hbase.procedure2.LockInfo;
+import org.apache.hadoop.hbase.procedure2.LockType;
+import org.apache.hadoop.hbase.procedure2.LockedResource;
+import org.apache.hadoop.hbase.procedure2.Procedure;
 import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
 import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
@@ -126,10 +127,10 @@ public class TestMasterObserver {
     private boolean postDisableTableCalled;
     private boolean preAbortProcedureCalled;
     private boolean postAbortProcedureCalled;
-    private boolean preListProceduresCalled;
-    private boolean postListProceduresCalled;
-    private boolean preListLocksCalled;
-    private boolean postListLocksCalled;
+    private boolean preGetProceduresCalled;
+    private boolean postGetProceduresCalled;
+    private boolean preGetLocksCalled;
+    private boolean postGetLocksCalled;
     private boolean preMoveCalled;
     private boolean postMoveCalled;
     private boolean preAssignCalled;
@@ -224,8 +225,10 @@ public class TestMasterObserver {
       postDisableTableCalled = false;
       preAbortProcedureCalled = false;
       postAbortProcedureCalled = false;
-      preListProceduresCalled = false;
-      postListProceduresCalled = false;
+      preGetProceduresCalled = false;
+      postGetProceduresCalled = false;
+      preGetLocksCalled = false;
+      postGetLocksCalled = false;
       preMoveCalled= false;
       postMoveCalled = false;
       preAssignCalled = false;
@@ -710,43 +713,43 @@ public class TestMasterObserver {
     }
 
     @Override
-    public void preListProcedures(
+    public void preGetProcedures(
         ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
-      preListProceduresCalled = true;
+      preGetProceduresCalled = true;
     }
 
     @Override
-    public void postListProcedures(
+    public void postGetProcedures(
         ObserverContext<MasterCoprocessorEnvironment> ctx,
-        List<ProcedureInfo> procInfoList) throws IOException {
-      postListProceduresCalled = true;
+        List<Procedure<?>> procInfoList) throws IOException {
+      postGetProceduresCalled = true;
     }
 
-    public boolean wasListProceduresCalled() {
-      return preListProceduresCalled && postListProceduresCalled;
+    public boolean wasGetProceduresCalled() {
+      return preGetProceduresCalled && postGetProceduresCalled;
     }
 
-    public boolean wasPreListProceduresCalledOnly() {
-      return preListProceduresCalled && !postListProceduresCalled;
+    public boolean wasPreGetProceduresCalledOnly() {
+      return preGetProceduresCalled && !postGetProceduresCalled;
     }
 
     @Override
-    public void preListLocks(ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
-      preListLocksCalled = true;
+    public void preGetLocks(ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
+      preGetLocksCalled = true;
     }
 
     @Override
-    public void postListLocks(ObserverContext<MasterCoprocessorEnvironment> ctx, List<LockInfo> lockInfoList)
+    public void postGetLocks(ObserverContext<MasterCoprocessorEnvironment> ctx, List<LockedResource> lockedResources)
         throws IOException {
-      postListLocksCalled = true;
+      postGetLocksCalled = true;
     }
 
-    public boolean wasListLocksCalled() {
-      return preListLocksCalled && postListLocksCalled;
+    public boolean wasGetLocksCalled() {
+      return preGetLocksCalled && postGetLocksCalled;
     }
 
-    public boolean wasPreListLocksCalledOnly() {
-      return preListLocksCalled && !postListLocksCalled;
+    public boolean wasPreGetLocksCalledOnly() {
+      return preGetLocksCalled && !postGetLocksCalled;
     }
 
     @Override
@@ -1546,14 +1549,14 @@ public class TestMasterObserver {
 
     @Override
     public void preRequestLock(ObserverContext<MasterCoprocessorEnvironment> ctx, String namespace,
-        TableName tableName, HRegionInfo[] regionInfos, LockProcedure.LockType type,
+        TableName tableName, HRegionInfo[] regionInfos, LockType type,
         String description) throws IOException {
       preRequestLockCalled = true;
     }
 
     @Override
     public void postRequestLock(ObserverContext<MasterCoprocessorEnvironment> ctx, String namespace,
-        TableName tableName, HRegionInfo[] regionInfos, LockProcedure.LockType type,
+        TableName tableName, HRegionInfo[] regionInfos, LockType type,
         String description) throws IOException {
       postRequestLockCalled = true;
     }
@@ -2173,7 +2176,7 @@ public class TestMasterObserver {
   }
 
   @Test (timeout=180000)
-  public void testListProceduresOperation() throws Exception {
+  public void testGetProceduresOperation() throws Exception {
     MiniHBaseCluster cluster = UTIL.getHBaseCluster();
 
     HMaster master = cluster.getMaster();
@@ -2182,14 +2185,14 @@ public class TestMasterObserver {
         CPMasterObserver.class.getName());
     cp.resetStates();
 
-    master.listProcedures();
+    master.getProcedures();
     assertTrue(
-      "Coprocessor should be called on list procedures request",
-      cp.wasListProceduresCalled());
+      "Coprocessor should be called on get procedures request",
+      cp.wasGetProceduresCalled());
   }
 
   @Test (timeout=180000)
-  public void testListLocksOperation() throws Exception {
+  public void testGetLocksOperation() throws Exception {
     MiniHBaseCluster cluster = UTIL.getHBaseCluster();
 
     HMaster master = cluster.getMaster();
@@ -2198,10 +2201,10 @@ public class TestMasterObserver {
         CPMasterObserver.class.getName());
     cp.resetStates();
 
-    master.listLocks();
+    master.getLocks();
     assertTrue(
-      "Coprocessor should be called on list locks request",
-      cp.wasListLocksCalled());
+      "Coprocessor should be called on get locks request",
+      cp.wasGetLocksCalled());
   }
 
   private void deleteTable(Admin admin, TableName tableName) throws Exception {
@@ -2222,7 +2225,7 @@ public class TestMasterObserver {
 
     final TableName tableName = TableName.valueOf("testLockedTable");
     long procId = master.getLockManager().remoteLocks().requestTableLock(tableName,
-          LockProcedure.LockType.EXCLUSIVE, "desc", null);
+          LockType.EXCLUSIVE, "desc", null);
     master.getLockManager().remoteLocks().lockHeartbeat(procId, false);
 
     assertTrue(cp.preAndPostForQueueLockAndHeartbeatLockCalled());

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java
index 9f4ce35..e96ce6d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java
@@ -24,7 +24,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.ChoreService;
 import org.apache.hadoop.hbase.CoordinatedStateManager;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.ProcedureInfo;
 import org.apache.hadoop.hbase.Server;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableDescriptors;
@@ -41,7 +40,8 @@ import org.apache.hadoop.hbase.master.normalizer.RegionNormalizer;
 import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
 import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
 import org.apache.hadoop.hbase.procedure.MasterProcedureManagerHost;
-import org.apache.hadoop.hbase.procedure2.LockInfo;
+import org.apache.hadoop.hbase.procedure2.LockedResource;
+import org.apache.hadoop.hbase.procedure2.Procedure;
 import org.apache.hadoop.hbase.procedure2.ProcedureEvent;
 import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
 import org.apache.hadoop.hbase.quotas.MasterQuotaManager;
@@ -50,6 +50,7 @@ import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
 import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
 import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
+
 import com.google.protobuf.Service;
 
 import static org.mockito.Mockito.mock;
@@ -226,12 +227,12 @@ public class MockNoopMasterServices implements MasterServices, Server {
   }
 
   @Override
-  public List<ProcedureInfo> listProcedures() throws IOException {
+  public List<Procedure<?>> getProcedures() throws IOException {
     return null;
   }
 
   @Override
-  public List<LockInfo> listLocks() throws IOException {
+  public List<LockedResource> getLocks() throws IOException {
     return null;
   }