Posted to issues@hbase.apache.org by GitBox <gi...@apache.org> on 2020/12/01 12:00:49 UTC

[GitHub] [hbase] virajjasani commented on a change in pull request #2706: [HBASE-25246] Backup/Restore hbase cell tags.

virajjasani commented on a change in pull request #2706:
URL: https://github.com/apache/hbase/pull/2706#discussion_r533343566



##########
File path: hbase-client/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java
##########
@@ -479,4 +487,40 @@ public void testRegionLockInfo() {
         + "\"sharedLockCount\":0"
         + "}]", lockJson);
   }
+
+  /**
+   * Test @{@link ProtobufUtil#toCell(Cell)} and
+   * @{@link ProtobufUtil#toCell(ExtendedCellBuilder, CellProtos.Cell)} conversion

Review comment:
       nit: extra `@`?
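   i.e. with the stray `@` dropped, the Javadoc should render cleanly, something like:
    ```
    /**
     * Test {@link ProtobufUtil#toCell(Cell)} and
     * {@link ProtobufUtil#toCell(ExtendedCellBuilder, CellProtos.Cell)} conversion
     */
    ```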

##########
File path: hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
##########
@@ -801,4 +816,149 @@ public boolean isWALVisited() {
       return isVisited;
     }
   }
+
+  /**
+   *  Add cell tags to delete mutations, run export and import tool and
+   *  verify that tags are present in import table also.
+   * @throws Throwable throws Throwable.
+   */
+  @Test
+  public void testTagsAddition() throws Throwable {
+    final TableName exportTable = TableName.valueOf(name.getMethodName());
+    TableDescriptor desc = TableDescriptorBuilder
+      .newBuilder(exportTable)
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA)
+        .setMaxVersions(5)
+        .setKeepDeletedCells(KeepDeletedCells.TRUE)
+        .build())
+      .setCoprocessor(MetadataController.class.getName())
+      .build();
+    UTIL.getAdmin().createTable(desc);
+
+    Table exportT = UTIL.getConnection().getTable(exportTable);
+
+    //Add first version of QUAL
+    Put p = new Put(ROW1);
+    p.addColumn(FAMILYA, QUAL, now, QUAL);
+    exportT.put(p);
+
+    //Add Delete family marker
+    Delete d = new Delete(ROW1, now+3);
+    // Add test attribute to delete mutation.
+    d.setAttribute(TEST_ATTR, Bytes.toBytes(TEST_TAG));
+    exportT.delete(d);
+
+    // Run export too with KeyValueCodecWithTags as Codec. This will ensure that export tool
+    // will use KeyValueCodecWithTags.
+    String[] args = new String[] {
+      "-D" + ExportUtils.RAW_SCAN + "=true",
+      // This will make sure that codec will encode and decode tags in rpc call.
+      "-Dhbase.client.rpc.codec=org.apache.hadoop.hbase.codec.KeyValueCodecWithTags",
+      exportTable.getNameAsString(),
+      FQ_OUTPUT_DIR,
+      "1000", // max number of key versions per key to export
+    };
+    assertTrue(runExport(args));
+
+    // Create an import table with MetadataController.
+    final TableName importTable = TableName.valueOf("importWithTestTagsAddition");
+    TableDescriptor importTableDesc = TableDescriptorBuilder
+      .newBuilder(importTable)
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA)
+        .setMaxVersions(5)
+        .setKeepDeletedCells(KeepDeletedCells.TRUE)
+        .build())
+      .setCoprocessor(MetadataController.class.getName())
+      .build();
+    UTIL.getAdmin().createTable(importTableDesc);
+
+    // Run import tool.
+    args = new String[] {
+      // This will make sure that codec will encode and decode tags in rpc call.
+      "-Dhbase.client.rpc.codec=org.apache.hadoop.hbase.codec.KeyValueCodecWithTags",
+      importTable.getNameAsString(),
+      FQ_OUTPUT_DIR
+    };
+    assertTrue(runImport(args));
+    // Make sure that tags exists in both exported and imported table.
+    assertTagExists(exportTable);
+    assertTagExists(importTable);
+  }
+
+  private void assertTagExists(TableName table) throws IOException {
+    List<Cell> values = new ArrayList<>();
+    for (HRegion region : UTIL.getHBaseCluster().getRegions(table)) {
+      values.clear();

Review comment:
       nit: redundant
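   Since the loop breaks as soon as `values` is populated, the list is always empty at
   the top of each iteration, so the loop could simply read:
    ```
    List<Cell> values = new ArrayList<>();
    for (HRegion region : UTIL.getHBaseCluster().getRegions(table)) {
      // values is guaranteed empty here: the loop breaks once it is populated,
      // so no clear() is needed.
      Scan scan = new Scan();
      scan.setRaw(true);
      scan.readAllVersions();
      scan.withStartRow(ROW1);
      RegionScanner scanner = region.getScanner(scan);
      scanner.next(values);
      if (!values.isEmpty()) {
        break;
      }
    }
    ```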

##########
File path: hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
##########
@@ -801,4 +816,149 @@ public boolean isWALVisited() {
       return isVisited;
     }
   }
+
+  /**
+   *  Add cell tags to delete mutations, run export and import tool and
+   *  verify that tags are present in import table also.
+   * @throws Throwable throws Throwable.
+   */
+  @Test
+  public void testTagsAddition() throws Throwable {
+    final TableName exportTable = TableName.valueOf(name.getMethodName());
+    TableDescriptor desc = TableDescriptorBuilder
+      .newBuilder(exportTable)
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA)
+        .setMaxVersions(5)
+        .setKeepDeletedCells(KeepDeletedCells.TRUE)
+        .build())
+      .setCoprocessor(MetadataController.class.getName())
+      .build();
+    UTIL.getAdmin().createTable(desc);
+
+    Table exportT = UTIL.getConnection().getTable(exportTable);
+
+    //Add first version of QUAL
+    Put p = new Put(ROW1);
+    p.addColumn(FAMILYA, QUAL, now, QUAL);
+    exportT.put(p);
+
+    //Add Delete family marker
+    Delete d = new Delete(ROW1, now+3);
+    // Add test attribute to delete mutation.
+    d.setAttribute(TEST_ATTR, Bytes.toBytes(TEST_TAG));
+    exportT.delete(d);
+
+    // Run export too with KeyValueCodecWithTags as Codec. This will ensure that export tool
+    // will use KeyValueCodecWithTags.
+    String[] args = new String[] {
+      "-D" + ExportUtils.RAW_SCAN + "=true",
+      // This will make sure that codec will encode and decode tags in rpc call.
+      "-Dhbase.client.rpc.codec=org.apache.hadoop.hbase.codec.KeyValueCodecWithTags",
+      exportTable.getNameAsString(),
+      FQ_OUTPUT_DIR,
+      "1000", // max number of key versions per key to export
+    };
+    assertTrue(runExport(args));
+
+    // Create an import table with MetadataController.
+    final TableName importTable = TableName.valueOf("importWithTestTagsAddition");
+    TableDescriptor importTableDesc = TableDescriptorBuilder
+      .newBuilder(importTable)
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA)
+        .setMaxVersions(5)
+        .setKeepDeletedCells(KeepDeletedCells.TRUE)
+        .build())
+      .setCoprocessor(MetadataController.class.getName())
+      .build();
+    UTIL.getAdmin().createTable(importTableDesc);
+
+    // Run import tool.
+    args = new String[] {
+      // This will make sure that codec will encode and decode tags in rpc call.
+      "-Dhbase.client.rpc.codec=org.apache.hadoop.hbase.codec.KeyValueCodecWithTags",
+      importTable.getNameAsString(),
+      FQ_OUTPUT_DIR
+    };
+    assertTrue(runImport(args));
+    // Make sure that tags exists in both exported and imported table.
+    assertTagExists(exportTable);
+    assertTagExists(importTable);
+  }
+
+  private void assertTagExists(TableName table) throws IOException {
+    List<Cell> values = new ArrayList<>();
+    for (HRegion region : UTIL.getHBaseCluster().getRegions(table)) {
+      values.clear();
+      Scan scan = new Scan();
+      // Make sure to set rawScan to true so that we will get Delete Markers.
+      scan.setRaw(true);
+      scan.readAllVersions();
+      scan.withStartRow(ROW1);
+      // Need to use RegionScanner instead of table#getScanner since the latter will
+      // not return tags since it will go through rpc layer and remove tags intentionally.
+      RegionScanner scanner = region.getScanner(scan);
+      scanner.next(values);
+      if (!values.isEmpty()) {
+        break;
+      }
+    }
+    boolean deleteFound = false;
+    for (Cell cell: values) {
+      if (PrivateCellUtil.isDelete(cell.getType().getCode())) {
+        deleteFound = true;
+        List<Tag> tags = PrivateCellUtil.getTags(cell);
+        Assert.assertEquals(1, tags.size());
+        for (Tag tag : tags) {
+          Assert.assertEquals(TEST_TAG, Tag.getValueAsString(tag));
+        }
+      }
+    }
+    Assert.assertTrue(deleteFound);
+  }
+
+  /*
+    This co-proc will add a cell tag to delete mutation.
+   */
+  public static class MetadataController implements RegionCoprocessor, RegionObserver {

Review comment:
       Nice one!

##########
File path: hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
##########
@@ -117,6 +129,9 @@
   private static final long now = System.currentTimeMillis();
   private final TableName EXPORT_TABLE = TableName.valueOf("export_table");
   private final TableName IMPORT_TABLE = TableName.valueOf("import_table");
+  public static final byte TEST_TAG_TYPE = (byte)33;
+  public static final String TEST_ATTR = "source_op";
+  public static final String TEST_TAG = new String("test-tag");

Review comment:
       nit: `TEST_TAG = "test-tag"`
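   `new String("test-tag")` just copies an interned literal for no benefit; the plain
   literal is enough:
    ```
    public static final String TEST_TAG = "test-tag";
    ```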

##########
File path: hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
##########
@@ -117,6 +129,9 @@
   private static final long now = System.currentTimeMillis();
   private final TableName EXPORT_TABLE = TableName.valueOf("export_table");
   private final TableName IMPORT_TABLE = TableName.valueOf("import_table");
+  public static final byte TEST_TAG_TYPE = (byte)33;

Review comment:
       Want to use `CUSTOM_TAG_TYPE_RANGE + 1` here, just to indicate that this is a custom Tag type?
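   e.g., assuming the `CUSTOM_TAG_TYPE_RANGE` constant exposed on `org.apache.hadoop.hbase.Tag`:
    ```
    // Deriving the value from the custom range documents that this is a custom tag type.
    public static final byte TEST_TAG_TYPE = (byte) (Tag.CUSTOM_TAG_TYPE_RANGE + 1);
    ```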

##########
File path: hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
##########
@@ -801,4 +816,149 @@ public boolean isWALVisited() {
       return isVisited;
     }
   }
+
+  /**
+   *  Add cell tags to delete mutations, run export and import tool and
+   *  verify that tags are present in import table also.
+   * @throws Throwable throws Throwable.
+   */
+  @Test
+  public void testTagsAddition() throws Throwable {
+    final TableName exportTable = TableName.valueOf(name.getMethodName());
+    TableDescriptor desc = TableDescriptorBuilder
+      .newBuilder(exportTable)
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA)
+        .setMaxVersions(5)
+        .setKeepDeletedCells(KeepDeletedCells.TRUE)
+        .build())
+      .setCoprocessor(MetadataController.class.getName())
+      .build();
+    UTIL.getAdmin().createTable(desc);
+
+    Table exportT = UTIL.getConnection().getTable(exportTable);
+
+    //Add first version of QUAL
+    Put p = new Put(ROW1);
+    p.addColumn(FAMILYA, QUAL, now, QUAL);
+    exportT.put(p);
+
+    //Add Delete family marker
+    Delete d = new Delete(ROW1, now+3);
+    // Add test attribute to delete mutation.
+    d.setAttribute(TEST_ATTR, Bytes.toBytes(TEST_TAG));
+    exportT.delete(d);
+
+    // Run export too with KeyValueCodecWithTags as Codec. This will ensure that export tool
+    // will use KeyValueCodecWithTags.
+    String[] args = new String[] {
+      "-D" + ExportUtils.RAW_SCAN + "=true",
+      // This will make sure that codec will encode and decode tags in rpc call.
+      "-Dhbase.client.rpc.codec=org.apache.hadoop.hbase.codec.KeyValueCodecWithTags",
+      exportTable.getNameAsString(),
+      FQ_OUTPUT_DIR,
+      "1000", // max number of key versions per key to export
+    };
+    assertTrue(runExport(args));
+
+    // Create an import table with MetadataController.
+    final TableName importTable = TableName.valueOf("importWithTestTagsAddition");
+    TableDescriptor importTableDesc = TableDescriptorBuilder
+      .newBuilder(importTable)
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA)
+        .setMaxVersions(5)
+        .setKeepDeletedCells(KeepDeletedCells.TRUE)
+        .build())
+      .setCoprocessor(MetadataController.class.getName())
+      .build();
+    UTIL.getAdmin().createTable(importTableDesc);
+
+    // Run import tool.
+    args = new String[] {
+      // This will make sure that codec will encode and decode tags in rpc call.
+      "-Dhbase.client.rpc.codec=org.apache.hadoop.hbase.codec.KeyValueCodecWithTags",
+      importTable.getNameAsString(),
+      FQ_OUTPUT_DIR
+    };
+    assertTrue(runImport(args));
+    // Make sure that tags exists in both exported and imported table.
+    assertTagExists(exportTable);

Review comment:
       Maybe `assertTagExists(exportTable)` can be moved right before where `importTable` is defined? Just to ensure that even before we create importTable, exportTable already has the tag available.
   Not a very strong point, it's up to you.
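   i.e. something like:
    ```
    assertTrue(runExport(args));
    // Verify the tag made it into the export table before the import table even exists.
    assertTagExists(exportTable);

    // Create an import table with MetadataController.
    final TableName importTable = TableName.valueOf("importWithTestTagsAddition");
    ```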

##########
File path: hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
##########
@@ -801,4 +816,149 @@ public boolean isWALVisited() {
       return isVisited;
     }
   }
+
+  /**
+   *  Add cell tags to delete mutations, run export and import tool and
+   *  verify that tags are present in import table also.
+   * @throws Throwable throws Throwable.
+   */
+  @Test
+  public void testTagsAddition() throws Throwable {
+    final TableName exportTable = TableName.valueOf(name.getMethodName());
+    TableDescriptor desc = TableDescriptorBuilder
+      .newBuilder(exportTable)
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA)
+        .setMaxVersions(5)
+        .setKeepDeletedCells(KeepDeletedCells.TRUE)
+        .build())
+      .setCoprocessor(MetadataController.class.getName())
+      .build();
+    UTIL.getAdmin().createTable(desc);
+
+    Table exportT = UTIL.getConnection().getTable(exportTable);
+
+    //Add first version of QUAL
+    Put p = new Put(ROW1);
+    p.addColumn(FAMILYA, QUAL, now, QUAL);
+    exportT.put(p);
+
+    //Add Delete family marker
+    Delete d = new Delete(ROW1, now+3);
+    // Add test attribute to delete mutation.
+    d.setAttribute(TEST_ATTR, Bytes.toBytes(TEST_TAG));
+    exportT.delete(d);
+
+    // Run export too with KeyValueCodecWithTags as Codec. This will ensure that export tool
+    // will use KeyValueCodecWithTags.
+    String[] args = new String[] {
+      "-D" + ExportUtils.RAW_SCAN + "=true",
+      // This will make sure that codec will encode and decode tags in rpc call.
+      "-Dhbase.client.rpc.codec=org.apache.hadoop.hbase.codec.KeyValueCodecWithTags",
+      exportTable.getNameAsString(),
+      FQ_OUTPUT_DIR,
+      "1000", // max number of key versions per key to export
+    };
+    assertTrue(runExport(args));
+
+    // Create an import table with MetadataController.
+    final TableName importTable = TableName.valueOf("importWithTestTagsAddition");
+    TableDescriptor importTableDesc = TableDescriptorBuilder
+      .newBuilder(importTable)
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA)
+        .setMaxVersions(5)
+        .setKeepDeletedCells(KeepDeletedCells.TRUE)
+        .build())
+      .setCoprocessor(MetadataController.class.getName())
+      .build();
+    UTIL.getAdmin().createTable(importTableDesc);
+
+    // Run import tool.
+    args = new String[] {
+      // This will make sure that codec will encode and decode tags in rpc call.
+      "-Dhbase.client.rpc.codec=org.apache.hadoop.hbase.codec.KeyValueCodecWithTags",
+      importTable.getNameAsString(),
+      FQ_OUTPUT_DIR
+    };
+    assertTrue(runImport(args));
+    // Make sure that tags exists in both exported and imported table.
+    assertTagExists(exportTable);
+    assertTagExists(importTable);
+  }
+
+  private void assertTagExists(TableName table) throws IOException {
+    List<Cell> values = new ArrayList<>();
+    for (HRegion region : UTIL.getHBaseCluster().getRegions(table)) {
+      values.clear();
+      Scan scan = new Scan();
+      // Make sure to set rawScan to true so that we will get Delete Markers.
+      scan.setRaw(true);
+      scan.readAllVersions();
+      scan.withStartRow(ROW1);
+      // Need to use RegionScanner instead of table#getScanner since the latter will
+      // not return tags since it will go through rpc layer and remove tags intentionally.
+      RegionScanner scanner = region.getScanner(scan);
+      scanner.next(values);
+      if (!values.isEmpty()) {
+        break;
+      }
+    }
+    boolean deleteFound = false;
+    for (Cell cell: values) {
+      if (PrivateCellUtil.isDelete(cell.getType().getCode())) {
+        deleteFound = true;
+        List<Tag> tags = PrivateCellUtil.getTags(cell);
+        Assert.assertEquals(1, tags.size());
+        for (Tag tag : tags) {
+          Assert.assertEquals(TEST_TAG, Tag.getValueAsString(tag));
+        }
+      }
+    }
+    Assert.assertTrue(deleteFound);
+  }
+
+  /*
+    This co-proc will add a cell tag to delete mutation.
+   */
+  public static class MetadataController implements RegionCoprocessor, RegionObserver {
+    @Override
+    public Optional<RegionObserver> getRegionObserver() {
+      return Optional.of(this);
+    }
+
+    @Override
+    public void preBatchMutate(ObserverContext<RegionCoprocessorEnvironment> c,
+                               MiniBatchOperationInProgress<Mutation> miniBatchOp)
+      throws IOException {
+      if (c.getEnvironment().getRegion().getRegionInfo().getTable().isSystemTable()) {
+        return;
+      }
+      for (int i = 0; i < miniBatchOp.size(); i++) {
+        Mutation m = miniBatchOp.getOperation(i);
+        if (!(m instanceof Delete)) {
+          continue;
+        }
+        byte[] sourceOpAttr = m.getAttribute(TEST_ATTR);
+        if (sourceOpAttr == null) {
+          continue;
+        }
+        Tag sourceOpTag = new ArrayBackedTag(TEST_TAG_TYPE, sourceOpAttr);
+        List<Cell> updatedCells = new ArrayList<>();
+        for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance(); ) {
+          Cell cell = cellScanner.current();
+          List<Tag> tags = PrivateCellUtil.getTags(cell);
+          tags.add(sourceOpTag);
+          Cell updatedCell = PrivateCellUtil.createCell(cell, tags);
+          updatedCells.add(updatedCell);
+        }
+        m.getFamilyCellMap().clear();
+        // Clear and add new Cells to the Mutation.
+        for (Cell cell : updatedCells) {
+          if (m instanceof Delete) {

Review comment:
       This is always true because of the check above:
   ```
           if (!(m instanceof Delete)) {
             continue;
           }
   ```
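   So the inner check can go away. A sketch, assuming the guarded branch just adds the
   re-tagged cell back via `Delete#add` (the branch body is cut off in this hunk):
    ```
    m.getFamilyCellMap().clear();
    // m is guaranteed to be a Delete here; non-Delete mutations were skipped above.
    for (Cell cell : updatedCells) {
      ((Delete) m).add(cell);
    }
    ```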




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org