You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by zh...@apache.org on 2019/12/29 13:56:33 UTC

[hbase] branch branch-2 updated: HBASE-23618 Add a tool to dump procedure info in the WAL file (#969)

This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-2 by this push:
     new 2830c96  HBASE-23618 Add a tool to dump procedure info in the WAL file (#969)
2830c96 is described below

commit 2830c962546f40b4cb566376bbffef43388981ab
Author: Duo Zhang <zh...@apache.org>
AuthorDate: Sun Dec 29 21:43:50 2019 +0800

    HBASE-23618 Add a tool to dump procedure info in the WAL file (#969)
    
    Signed-off-by: stack <st...@apache.org>
---
 .../store/region/RegionProcedureStore.java         |   2 +-
 .../store/region/WALProcedurePrettyPrinter.java    | 133 +++++++++++++++++++++
 .../apache/hadoop/hbase/wal/WALPrettyPrinter.java  |  29 +++--
 .../region/TestWALProcedurePrettyPrinter.java      | 130 ++++++++++++++++++++
 4 files changed, 281 insertions(+), 13 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStore.java
index fe3ceb8..ae0a54d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStore.java
@@ -120,7 +120,7 @@ public class RegionProcedureStore extends ProcedureStoreBase {
 
   private static final TableName TABLE_NAME = TableName.valueOf("master:procedure");
 
-  private static final byte[] FAMILY = Bytes.toBytes("p");
+  static final byte[] FAMILY = Bytes.toBytes("p");
 
   private static final byte[] PROC_QUALIFIER = Bytes.toBytes("d");
 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure2/store/region/WALProcedurePrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure2/store/region/WALProcedurePrettyPrinter.java
new file mode 100644
index 0000000..35f9fc0
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure2/store/region/WALProcedurePrettyPrinter.java
@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.procedure2.store.region;
+
+import static org.apache.hadoop.hbase.procedure2.store.region.RegionProcedureStore.FAMILY;
+
+import java.io.PrintStream;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.format.DateTimeFormatter;
+import java.util.Map;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.procedure2.Procedure;
+import org.apache.hadoop.hbase.procedure2.ProcedureUtil;
+import org.apache.hadoop.hbase.util.AbstractHBaseTool;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.wal.WAL;
+import org.apache.hadoop.hbase.wal.WALEdit;
+import org.apache.hadoop.hbase.wal.WALFactory;
+import org.apache.hadoop.hbase.wal.WALKey;
+import org.apache.hadoop.hbase.wal.WALPrettyPrinter;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+
+import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
+
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
+
+/**
+ * A tool to dump the procedures in the WAL files.
+ * <p/>
+ * The difference between this and {@link WALPrettyPrinter} is that, this class will decode the
+ * procedure in the WALEdit for better debugging. You are free to use {@link WALPrettyPrinter} to
+ * dump the same file as well.
+ */
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
+@InterfaceStability.Evolving
+public class WALProcedurePrettyPrinter extends AbstractHBaseTool {
+
+  private static final String KEY_TMPL = "Sequence=%s, at write timestamp=%s";
+
+  private static final DateTimeFormatter FORMATTER =
+    DateTimeFormatter.ISO_OFFSET_DATE_TIME.withZone(ZoneId.systemDefault());
+
+  private String file;
+
+  private PrintStream out;
+
+  public WALProcedurePrettyPrinter() {
+    this(System.out);
+  }
+
+  public WALProcedurePrettyPrinter(PrintStream out) {
+    this.out = out;
+  }
+
+  @Override
+  protected void addOptions() {
+  }
+
+  @Override
+  protected void processOptions(CommandLine cmd) {
+    if (cmd.getArgList().size() != 1) {
+      throw new IllegalArgumentException("Please specify the file to dump");
+    }
+    file = cmd.getArgList().get(0);
+  }
+
+  @Override
+  protected int doWork() throws Exception {
+    Path path = new Path(file);
+    FileSystem fs = path.getFileSystem(conf);
+    try (WAL.Reader reader = WALFactory.createReader(fs, path, conf)) {
+      for (;;) {
+        WAL.Entry entry = reader.next();
+        if (entry == null) {
+          return 0;
+        }
+        WALKey key = entry.getKey();
+        WALEdit edit = entry.getEdit();
+        long sequenceId = key.getSequenceId();
+        long writeTime = key.getWriteTime();
+        out.println(
+          String.format(KEY_TMPL, sequenceId, FORMATTER.format(Instant.ofEpochMilli(writeTime))));
+        for (Cell cell : edit.getCells()) {
+          Map<String, Object> op = WALPrettyPrinter.toStringMap(cell);
+          if (!Bytes.equals(FAMILY, 0, FAMILY.length, cell.getFamilyArray(), cell.getFamilyOffset(),
+            cell.getFamilyLength())) {
+            // We could have cells other than procedure edits, for example, a flush marker
+            WALPrettyPrinter.printCell(out, op, false);
+            continue;
+          }
+          long procId = Bytes.toLong(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
+          out.println("pid=" + procId + ", type=" + op.get("type") + ", column=" +
+            op.get("family") + ":" + op.get("qualifier"));
+          if (cell.getType() == Cell.Type.Put) {
+            if (cell.getValueLength() > 0) {
+              // should be a normal put
+              Procedure<?> proc =
+                ProcedureUtil.convertToProcedure(ProcedureProtos.Procedure.parser()
+                  .parseFrom(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()));
+              out.println("\t" + proc.toStringDetails());
+            } else {
+              // should be a 'delete' put
+              out.println("\tmark deleted");
+            }
+          }
+          out.println("cell total size sum: " + cell.heapSize());
+        }
+        out.println("edit heap size: " + edit.heapSize());
+        out.println("position: " + reader.getPosition());
+      }
+    }
+  }
+}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
index b49a451..8c90a4b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
@@ -328,7 +328,7 @@ public class WALPrettyPrinter {
           continue;
         }
         // initialize list into which we will store atomic actions
-        List<Map> actions = new ArrayList<>();
+        List<Map<String, Object>> actions = new ArrayList<>();
         for (Cell cell : edit.getCells()) {
           // add atomic operation to txn
           Map<String, Object> op = new HashMap<>(toStringMap(cell));
@@ -359,16 +359,8 @@ public class WALPrettyPrinter {
           out.println(String.format(outputTmpl,
               txn.get("sequence"), txn.get("table"), txn.get("region"), new Date(writeTime)));
           for (int i = 0; i < actions.size(); i++) {
-            Map op = actions.get(i);
-            out.println("row=" + op.get("row") +
-                ", column=" + op.get("family") + ":" + op.get("qualifier"));
-            if (op.get("tag") != null) {
-              out.println("    tag: " + op.get("tag"));
-            }
-            if (outputValues) {
-              out.println("    value: " + op.get("value"));
-            }
-            out.println("cell total size sum: " + op.get("total_size_sum"));
+            Map<String, Object> op = actions.get(i);
+            printCell(out, op, outputValues);
           }
         }
         out.println("edit heap size: " + entry.getEdit().heapSize());
@@ -382,10 +374,23 @@ public class WALPrettyPrinter {
     }
   }
 
-  private static Map<String, Object> toStringMap(Cell cell) {
+  public static void printCell(PrintStream out, Map<String, Object> op, boolean outputValues) {
+    out.println("row=" + op.get("row") + ", type=" + op.get("type") + ", column=" +
+      op.get("family") + ":" + op.get("qualifier"));
+    if (op.get("tag") != null) {
+      out.println("    tag: " + op.get("tag"));
+    }
+    if (outputValues) {
+      out.println("    value: " + op.get("value"));
+    }
+    out.println("cell total size sum: " + op.get("total_size_sum"));
+  }
+
+  public static Map<String, Object> toStringMap(Cell cell) {
     Map<String, Object> stringMap = new HashMap<>();
     stringMap.put("row",
         Bytes.toStringBinary(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()));
+    stringMap.put("type", cell.getType());
     stringMap.put("family", Bytes.toStringBinary(cell.getFamilyArray(), cell.getFamilyOffset(),
                 cell.getFamilyLength()));
     stringMap.put("qualifier",
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestWALProcedurePrettyPrinter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestWALProcedurePrettyPrinter.java
new file mode 100644
index 0000000..f9b816e
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestWALProcedurePrettyPrinter.java
@@ -0,0 +1,130 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.procedure2.store.region;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.BufferedReader;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.LoadCounter;
+import org.apache.hadoop.hbase.regionserver.MemStoreLAB;
+import org.apache.hadoop.hbase.testclassification.MasterTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.CommonFSUtils;
+import org.apache.hadoop.util.ToolRunner;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Category({ MasterTests.class, MediumTests.class })
+public class TestWALProcedurePrettyPrinter {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+    HBaseClassTestRule.forClass(TestWALProcedurePrettyPrinter.class);
+
+  private static final Logger LOG = LoggerFactory.getLogger(TestWALProcedurePrettyPrinter.class);
+
+  private HBaseCommonTestingUtility htu;
+
+  private RegionProcedureStore store;
+
+  @Before
+  public void setUp() throws IOException {
+    htu = new HBaseCommonTestingUtility();
+    htu.getConfiguration().setBoolean(MemStoreLAB.USEMSLAB_KEY, false);
+    Path testDir = htu.getDataTestDir();
+    CommonFSUtils.setWALRootDir(htu.getConfiguration(), testDir);
+    store = RegionProcedureStoreTestHelper.createStore(htu.getConfiguration(), new LoadCounter());
+  }
+
+  @After
+  public void tearDown() throws IOException {
+    store.stop(true);
+    htu.cleanupTestDir();
+  }
+
+  @Test
+  public void test() throws Exception {
+    List<RegionProcedureStoreTestProcedure> procs = new ArrayList<>();
+    for (int i = 0; i < 10; i++) {
+      RegionProcedureStoreTestProcedure proc = new RegionProcedureStoreTestProcedure();
+      store.insert(proc, null);
+      procs.add(proc);
+    }
+    store.region.flush(true);
+    for (int i = 0; i < 5; i++) {
+      store.delete(procs.get(i).getProcId());
+    }
+    store.cleanup();
+    Path walParentDir = new Path(htu.getDataTestDir(),
+      RegionProcedureStore.MASTER_PROCEDURE_DIR + "/" + HConstants.HREGION_LOGDIR_NAME);
+    FileSystem fs = walParentDir.getFileSystem(htu.getConfiguration());
+    Path walDir = fs.listStatus(walParentDir)[0].getPath();
+    Path walFile = fs.listStatus(walDir)[0].getPath();
+    store.walRoller.requestRollAll();
+    store.walRoller.waitUntilWalRollFinished();
+    ByteArrayOutputStream bos = new ByteArrayOutputStream();
+    PrintStream out = new PrintStream(bos);
+    WALProcedurePrettyPrinter printer = new WALProcedurePrettyPrinter(out);
+    assertEquals(0, ToolRunner.run(htu.getConfiguration(), printer,
+      new String[] { fs.makeQualified(walFile).toString() }));
+    try (BufferedReader reader = new BufferedReader(
+      new InputStreamReader(new ByteArrayInputStream(bos.toByteArray()), StandardCharsets.UTF_8))) {
+      long inserted = 0;
+      long markedDeleted = 0;
+      long deleted = 0;
+      for (;;) {
+        String line = reader.readLine();
+        LOG.info(line);
+        if (line == null) {
+          break;
+        }
+        if (line.startsWith("\t")) {
+          if (line.startsWith("\tpid=")) {
+            inserted++;
+          } else {
+            assertEquals("\tmark deleted", line);
+            markedDeleted++;
+          }
+        } else if (line.contains("type=DeleteFamily")) {
+          deleted++;
+        }
+      }
+      assertEquals(10, inserted);
+      assertEquals(5, markedDeleted);
+      assertEquals(5, deleted);
+    }
+  }
+}