You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2021/01/03 17:34:19 UTC

[hbase] branch master updated: HBASE-25439 Add BYTE unit in PrettyPrinter.Unit (#2812)

This is an automated email from the ASF dual-hosted git repository.

stack pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/master by this push:
     new 8ecfced  HBASE-25439 Add BYTE unit in PrettyPrinter.Unit (#2812)
8ecfced is described below

commit 8ecfcede962015bfce615d602e7e28327d41cdb6
Author: Baiqiang Zhao <zb...@gmail.com>
AuthorDate: Mon Jan 4 01:34:00 2021 +0800

    HBASE-25439 Add BYTE unit in PrettyPrinter.Unit (#2812)
    
    Signed-off-by: stack <st...@apache.org>
---
 .../hbase/client/TableDescriptorBuilder.java       |  37 ++++++-
 .../hbase/client/TestTableDescriptorBuilder.java   |  64 ++++++++++-
 .../java/org/apache/hadoop/hbase/HConstants.java   |   8 ++
 .../apache/hadoop/hbase/util/PrettyPrinter.java    | 122 +++++++++++++++++++++
 hbase-shell/src/main/ruby/hbase/admin.rb           |   4 +-
 5 files changed, 230 insertions(+), 5 deletions(-)

diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
index d2cfff5..fd46665 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
@@ -41,8 +41,10 @@ import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.exceptions.HBaseException;
 import org.apache.hadoop.hbase.rsgroup.RSGroupInfo;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.PrettyPrinter;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -246,6 +248,15 @@ public class TableDescriptorBuilder {
     RESERVED_KEYWORDS.add(IS_META_KEY);
   }
 
+  public static PrettyPrinter.Unit getUnit(String key) {
+    switch (key) {
+      case MAX_FILESIZE:
+        return PrettyPrinter.Unit.BYTE;
+      default:
+        return PrettyPrinter.Unit.NONE;
+    }
+  }
+
   /**
    * @deprecated namespace table has been folded into the ns family in meta table, do not use this
    *             any more.
@@ -458,11 +469,22 @@ public class TableDescriptorBuilder {
     return this;
   }
 
+  public TableDescriptorBuilder setMaxFileSize(String maxFileSize) throws HBaseException {
+    desc.setMaxFileSize(maxFileSize);
+    return this;
+  }
+
   public TableDescriptorBuilder setMemStoreFlushSize(long memstoreFlushSize) {
     desc.setMemStoreFlushSize(memstoreFlushSize);
     return this;
   }
 
+  public TableDescriptorBuilder setMemStoreFlushSize(String memStoreFlushSize)
+    throws HBaseException {
+    desc.setMemStoreFlushSize(memStoreFlushSize);
+    return this;
+  }
+
   public TableDescriptorBuilder setNormalizerTargetRegionCount(final int regionCount) {
     desc.setNormalizerTargetRegionCount(regionCount);
     return this;
@@ -982,6 +1004,11 @@ public class TableDescriptorBuilder {
       return setValue(MAX_FILESIZE_KEY, Long.toString(maxFileSize));
     }
 
+    public ModifyableTableDescriptor setMaxFileSize(String maxFileSize) throws HBaseException {
+      return setMaxFileSize(Long.parseLong(PrettyPrinter.
+        valueOf(maxFileSize, PrettyPrinter.Unit.BYTE)));
+    }
+
     /**
      * Returns the size of the memstore after which a flush to filesystem is
      * triggered.
@@ -1007,6 +1034,12 @@ public class TableDescriptorBuilder {
       return setValue(MEMSTORE_FLUSHSIZE_KEY, Long.toString(memstoreFlushSize));
     }
 
+    public ModifyableTableDescriptor setMemStoreFlushSize(String memStoreFlushSize)
+      throws HBaseException {
+      return setMemStoreFlushSize(Long.parseLong(PrettyPrinter.valueOf(memStoreFlushSize,
+        PrettyPrinter.Unit.BYTE)));
+    }
+
     /**
      * This sets the class associated with the flush policy which determines
      * determines the stores need to be flushed when flushing a region. The
@@ -1169,7 +1202,7 @@ public class TableDescriptorBuilder {
           printCommaForAttr = true;
           s.append(key);
           s.append(" => ");
-          s.append('\'').append(value).append('\'');
+          s.append('\'').append(PrettyPrinter.format(value, getUnit(key))).append('\'');
         }
 
         if (!userKeys.isEmpty()) {
@@ -1189,7 +1222,7 @@ public class TableDescriptorBuilder {
             printCommaForCfg = true;
             s.append('\'').append(key).append('\'');
             s.append(" => ");
-            s.append('\'').append(value).append('\'');
+            s.append('\'').append(PrettyPrinter.format(value, getUnit(key))).append('\'');
           }
           s.append("}");
         }
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java
index c29c835..425d590 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java
@@ -28,6 +28,7 @@ import java.util.regex.Pattern;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.exceptions.HBaseException;
 import org.apache.hadoop.hbase.rsgroup.RSGroupInfo;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -218,6 +219,33 @@ public class TestTableDescriptorBuilder {
     assertEquals(1111L, desc.getMaxFileSize());
   }
 
+  @Test
+  public void testSetMaxFileSize() throws HBaseException {
+    TableDescriptorBuilder builder =
+      TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName()));
+
+    String maxFileSize = "1073741824";
+    builder.setMaxFileSize(maxFileSize);
+    assertEquals(1073741824, builder.build().getMaxFileSize());
+
+    maxFileSize = "1GB";
+    builder.setMaxFileSize(maxFileSize);
+    assertEquals(1073741824, builder.build().getMaxFileSize());
+
+    maxFileSize = "10GB 25MB";
+    builder.setMaxFileSize(maxFileSize);
+    assertEquals(10763632640L, builder.build().getMaxFileSize());
+
+    // ignore case
+    maxFileSize = "10GB 512mb 512KB 512b";
+    builder.setMaxFileSize(maxFileSize);
+    assertEquals(11274813952L, builder.build().getMaxFileSize());
+
+    maxFileSize = "10737942528 B (10GB 512KB)";
+    builder.setMaxFileSize(maxFileSize);
+    assertEquals(10737942528L, builder.build().getMaxFileSize());
+  }
+
   /**
    * Test default value handling for memStoreFlushSize
    */
@@ -232,6 +260,33 @@ public class TestTableDescriptorBuilder {
   }
 
   @Test
+  public void testSetMemStoreFlushSize() throws HBaseException {
+    TableDescriptorBuilder builder =
+      TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName()));
+
+    String memstoreFlushSize = "1073741824";
+    builder.setMemStoreFlushSize(memstoreFlushSize);
+    assertEquals(1073741824, builder.build().getMemStoreFlushSize());
+
+    memstoreFlushSize = "1GB";
+    builder.setMemStoreFlushSize(memstoreFlushSize);
+    assertEquals(1073741824, builder.build().getMemStoreFlushSize());
+
+    memstoreFlushSize = "10GB 25MB";
+    builder.setMemStoreFlushSize(memstoreFlushSize);
+    assertEquals(10763632640L, builder.build().getMemStoreFlushSize());
+
+    // ignore case
+    memstoreFlushSize = "10GB 512mb 512KB 512b";
+    builder.setMemStoreFlushSize(memstoreFlushSize);
+    assertEquals(11274813952L, builder.build().getMemStoreFlushSize());
+
+    memstoreFlushSize = "10737942528 B (10GB 512KB)";
+    builder.setMemStoreFlushSize(memstoreFlushSize);
+    assertEquals(10737942528L, builder.build().getMemStoreFlushSize());
+  }
+
+  @Test
   public void testClassMethodsAreBuilderStyle() {
     BuilderStyleTest.assertClassesAreBuilderStyle(TableDescriptorBuilder.class);
   }
@@ -281,7 +336,7 @@ public class TestTableDescriptorBuilder {
   }
 
   @Test
-  public void testStringCustomizedValues() {
+  public void testStringCustomizedValues() throws HBaseException {
     byte[] familyName = Bytes.toBytes("cf");
     ColumnFamilyDescriptor hcd =
       ColumnFamilyDescriptorBuilder.newBuilder(familyName).setBlocksize(1000).build();
@@ -292,6 +347,13 @@ public class TestTableDescriptorBuilder {
       "'testStringCustomizedValues', " +
         "{TABLE_ATTRIBUTES => {DURABILITY => 'ASYNC_WAL'}}, {NAME => 'cf', BLOCKSIZE => '1000'}",
       htd.toStringCustomizedValues());
+
+    htd = TableDescriptorBuilder.newBuilder(htd).setMaxFileSize("10737942528").build();
+    assertEquals(
+      "'testStringCustomizedValues', " +
+        "{TABLE_ATTRIBUTES => {DURABILITY => 'ASYNC_WAL', "
+        + "MAX_FILESIZE => '10737942528 B (10GB 512KB)'}}, {NAME => 'cf', BLOCKSIZE => '1000'}",
+      htd.toStringCustomizedValues());
   }
 
   @Test
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index 9a6912a..d31cadd 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -700,6 +700,14 @@ public final class HConstants {
   public static final int HOUR_IN_SECONDS = 60 * 60;
   public static final int MINUTE_IN_SECONDS = 60;
 
+  /**
+   * KB, MB, GB, TB equivalent to how many bytes
+   */
+  public static final long KB_IN_BYTES = 1024;
+  public static final long MB_IN_BYTES = 1024 * KB_IN_BYTES;
+  public static final long GB_IN_BYTES = 1024 * MB_IN_BYTES;
+  public static final long TB_IN_BYTES = 1024 * GB_IN_BYTES;
+
   //TODO: although the following are referenced widely to format strings for
   //      the shell. They really aren't a part of the public API. It would be
   //      nice if we could put them somewhere where they did not need to be
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java
index ff7064b..83eb01a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java
@@ -41,11 +41,17 @@ public final class PrettyPrinter {
           "((\\d+)\\s*MINUTES?)?\\s*((\\d+)\\s*SECONDS?)?\\s*\\)?";
   private static final Pattern INTERVAL_PATTERN = Pattern.compile(INTERVAL_REGEX,
           Pattern.CASE_INSENSITIVE);
+  private static final String SIZE_REGEX = "((\\d+)\\s*B?\\s*\\()?\\s*" +
+    "((\\d+)\\s*TB?)?\\s*((\\d+)\\s*GB?)?\\s*" +
+    "((\\d+)\\s*MB?)?\\s*((\\d+)\\s*KB?)?\\s*((\\d+)\\s*B?)?\\s*\\)?";
+  private static final Pattern SIZE_PATTERN = Pattern.compile(SIZE_REGEX,
+    Pattern.CASE_INSENSITIVE);
 
   public enum Unit {
     TIME_INTERVAL,
     LONG,
     BOOLEAN,
+    BYTE,
     NONE
   }
 
@@ -63,6 +69,9 @@ public final class PrettyPrinter {
         byte[] booleanBytes = Bytes.toBytesBinary(value);
         human.append(String.valueOf(Bytes.toBoolean(booleanBytes)));
         break;
+      case BYTE:
+        human.append(humanReadableByte(Long.parseLong(value)));
+        break;
       default:
         human.append(value);
     }
@@ -82,6 +91,9 @@ public final class PrettyPrinter {
       case TIME_INTERVAL:
         value.append(humanReadableIntervalToSec(pretty));
         break;
+      case BYTE:
+        value.append(humanReadableSizeToBytes(pretty));
+        break;
       default:
         value.append(pretty);
     }
@@ -192,6 +204,116 @@ public final class PrettyPrinter {
   }
 
   /**
+   * Convert a long size to a human readable string.
+   * Example: 10763632640 -> 10763632640 B (10GB 25MB)
+   * @param size the size in bytes
+   * @return human readable string
+   */
+  private static String humanReadableByte(final long size) {
+    StringBuilder sb = new StringBuilder();
+    long tb, gb, mb, kb, b;
+
+    if (size < HConstants.KB_IN_BYTES) {
+      sb.append(size);
+      sb.append(" B");
+      return sb.toString();
+    }
+
+    tb = size / HConstants.TB_IN_BYTES;
+    gb = (size - HConstants.TB_IN_BYTES * tb) / HConstants.GB_IN_BYTES;
+    mb = (size - HConstants.TB_IN_BYTES * tb
+      - HConstants.GB_IN_BYTES * gb) / HConstants.MB_IN_BYTES;
+    kb = (size - HConstants.TB_IN_BYTES * tb - HConstants.GB_IN_BYTES * gb
+      - HConstants.MB_IN_BYTES * mb) / HConstants.KB_IN_BYTES;
+    b = (size - HConstants.TB_IN_BYTES * tb - HConstants.GB_IN_BYTES * gb
+      - HConstants.MB_IN_BYTES * mb - HConstants.KB_IN_BYTES * kb);
+
+    sb.append(size).append(" B (");
+    if (tb > 0) {
+      sb.append(tb);
+      sb.append("TB");
+    }
+
+    if (gb > 0) {
+      sb.append(tb > 0 ? " " : "");
+      sb.append(gb);
+      sb.append("GB");
+    }
+
+    if (mb > 0) {
+      sb.append(tb + gb > 0 ? " " : "");
+      sb.append(mb);
+      sb.append("MB");
+    }
+
+    if (kb > 0) {
+      sb.append(tb + gb + mb > 0 ? " " : "");
+      sb.append(kb);
+      sb.append("KB");
+    }
+
+    if (b > 0) {
+      sb.append(tb + gb + mb + kb > 0 ? " " : "");
+      sb.append(b);
+      sb.append("B");
+    }
+
+    sb.append(")");
+    return sb.toString();
+  }
+
+  /**
+   * Convert a human readable size to bytes.
+   * Examples of the human readable size are: 50 GB 20 MB 1 KB, 25000 B etc.
+   * The units of size specified can be in uppercase as well as lowercase. Also, if a
+   * single number is specified without any size unit, it is assumed to be in bytes.
+   * @param humanReadableSize human readable size
+   * @return value in bytes
+   * @throws HBaseException if the leading byte count and the human readable
+   *   portion of the size string do not denote the same number of bytes
+   */
+  private static long humanReadableSizeToBytes(final String humanReadableSize)
+      throws HBaseException {
+    if (humanReadableSize == null) {
+      return -1;
+    }
+
+    try {
+      return Long.parseLong(humanReadableSize);
+    } catch(NumberFormatException ex) {
+      LOG.debug("Given size value is not a number, parsing for human readable format");
+    }
+
+    String tb = null;
+    String gb = null;
+    String mb = null;
+    String kb = null;
+    String b = null;
+    String expectedSize = null;
+    long size = 0;
+
+    Matcher matcher = PrettyPrinter.SIZE_PATTERN.matcher(humanReadableSize);
+    if (matcher.matches()) {
+      expectedSize = matcher.group(2);
+      tb = matcher.group(4);
+      gb = matcher.group(6);
+      mb = matcher.group(8);
+      kb = matcher.group(10);
+      b = matcher.group(12);
+    }
+    size += tb != null ? Long.parseLong(tb)*HConstants.TB_IN_BYTES:0;
+    size += gb != null ? Long.parseLong(gb)*HConstants.GB_IN_BYTES:0;
+    size += mb != null ? Long.parseLong(mb)*HConstants.MB_IN_BYTES:0;
+    size += kb != null ? Long.parseLong(kb)*HConstants.KB_IN_BYTES:0;
+    size += b != null ? Long.parseLong(b):0;
+
+    if (expectedSize != null && Long.parseLong(expectedSize) != size) {
+      throw new HBaseException("Malformed size string: values in byte and human readable" +
+        " format do not match");
+    }
+    return size;
+  }
+
+  /**
    * Pretty prints a collection of any type to a string. Relies on toString() implementation of the
    * object type.
    * @param collection collection to pretty print.
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb b/hbase-shell/src/main/ruby/hbase/admin.rb
index ee54ae7..6228ad7 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -1472,7 +1472,7 @@ module Hbase
     # Parse arguments and update TableDescriptorBuilder accordingly
     # rubocop:disable Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity
     def update_tdb_from_arg(tdb, arg)
-      tdb.setMaxFileSize(JLong.valueOf(arg.delete(TableDescriptorBuilder::MAX_FILESIZE))) if arg.include?(TableDescriptorBuilder::MAX_FILESIZE)
+      tdb.setMaxFileSize(arg.delete(TableDescriptorBuilder::MAX_FILESIZE)) if arg.include?(TableDescriptorBuilder::MAX_FILESIZE)
       tdb.setReadOnly(JBoolean.valueOf(arg.delete(TableDescriptorBuilder::READONLY))) if arg.include?(TableDescriptorBuilder::READONLY)
       tdb.setCompactionEnabled(JBoolean.valueOf(arg.delete(TableDescriptorBuilder::COMPACTION_ENABLED))) if arg.include?(TableDescriptorBuilder::COMPACTION_ENABLED)
       tdb.setSplitEnabled(JBoolean.valueOf(arg.delete(TableDescriptorBuilder::SPLIT_ENABLED))) if arg.include?(TableDescriptorBuilder::SPLIT_ENABLED)
@@ -1480,7 +1480,7 @@ module Hbase
       tdb.setNormalizationEnabled(JBoolean.valueOf(arg.delete(TableDescriptorBuilder::NORMALIZATION_ENABLED))) if arg.include?(TableDescriptorBuilder::NORMALIZATION_ENABLED)
       tdb.setNormalizerTargetRegionCount(JInteger.valueOf(arg.delete(TableDescriptorBuilder::NORMALIZER_TARGET_REGION_COUNT))) if arg.include?(TableDescriptorBuilder::NORMALIZER_TARGET_REGION_COUNT)
       tdb.setNormalizerTargetRegionSize(JLong.valueOf(arg.delete(TableDescriptorBuilder::NORMALIZER_TARGET_REGION_SIZE))) if arg.include?(TableDescriptorBuilder::NORMALIZER_TARGET_REGION_SIZE)
-      tdb.setMemStoreFlushSize(JLong.valueOf(arg.delete(TableDescriptorBuilder::MEMSTORE_FLUSHSIZE))) if arg.include?(TableDescriptorBuilder::MEMSTORE_FLUSHSIZE)
+      tdb.setMemStoreFlushSize(arg.delete(TableDescriptorBuilder::MEMSTORE_FLUSHSIZE)) if arg.include?(TableDescriptorBuilder::MEMSTORE_FLUSHSIZE)
       tdb.setDurability(org.apache.hadoop.hbase.client.Durability.valueOf(arg.delete(TableDescriptorBuilder::DURABILITY))) if arg.include?(TableDescriptorBuilder::DURABILITY)
       tdb.setPriority(JInteger.valueOf(arg.delete(TableDescriptorBuilder::PRIORITY))) if arg.include?(TableDescriptorBuilder::PRIORITY)
       tdb.setFlushPolicyClassName(arg.delete(TableDescriptorBuilder::FLUSH_POLICY)) if arg.include?(TableDescriptorBuilder::FLUSH_POLICY)