Posted to commits@ambari.apache.org by av...@apache.org on 2018/05/10 21:12:39 UTC

[ambari] 03/03: AMBARI-23804 : Refine AMS HBase region splitting calculation based on UUID work (Refactor Split point computation. Allow only Murmur3Hash).

This is an automated email from the ASF dual-hosted git repository.

avijayan pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/ambari.git

commit d4d2767ae0d9c8e3043144df52bd5d6094b61171
Author: Aravindan Vijayan <av...@hortonworks.com>
AuthorDate: Thu May 10 11:46:21 2018 -0700

    AMBARI-23804 : Refine AMS HBase region splitting calculation based on UUID work (Refactor Split point computation. Allow only Murmur3Hash).
---
 .../metrics/core/timeline/PhoenixHBaseAccessor.java |  1 +
 .../timeline/TimelineMetricSplitPointComputer.java  | 19 ++++++++++---------
 .../discovery/TimelineMetricMetadataManager.java    | 20 +-------------------
 .../timeline/uuid/Murmur3HashUuidGenStrategy.java   | 13 ++++++++++++-
 .../TimelineMetricSplitPointComputerTest.java       | 21 ++++++++++++---------
 5 files changed, 36 insertions(+), 38 deletions(-)
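
Editorial note: the main structural change in the diff below is that split-point computation no longer
runs inside the TimelineMetricSplitPointComputer constructor; callers now invoke computeSplitPoints()
explicitly. A minimal sketch of the resulting call pattern, assuming the getter names used in the test
file below and return types matching the List<byte[]> fields declared in the class:

    // Sketch only: mirrors the call sequence in PhoenixHBaseAccessor and the updated tests.
    // metricsConf, hbaseConf and metadataManagerInstance are assumed to be already initialized.
    TimelineMetricSplitPointComputer splitPointComputer =
        new TimelineMetricSplitPointComputer(metricsConf, hbaseConf, metadataManagerInstance);

    // Previously triggered implicitly from the constructor; now an explicit step.
    splitPointComputer.computeSplitPoints();

    // Split points are then read off the computer when creating the metrics tables.
    List<byte[]> precisionSplits  = splitPointComputer.getPrecisionSplitPoints();
    List<byte[]> clusterAggSplits = splitPointComputer.getClusterAggregateSplitPoints();
    List<byte[]> hostAggSplits    = splitPointComputer.getHostAggregateSplitPoints();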

diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/PhoenixHBaseAccessor.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/PhoenixHBaseAccessor.java
index dec7850..c684b0a 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/PhoenixHBaseAccessor.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/PhoenixHBaseAccessor.java
@@ -492,6 +492,7 @@ public class PhoenixHBaseAccessor {
     PreparedStatement pStmt = null;
     TimelineMetricSplitPointComputer splitPointComputer = new TimelineMetricSplitPointComputer(
       metricsConf, hbaseConf, metadataManagerInstance);
+    splitPointComputer.computeSplitPoints();
 
     String encoding = metricsConf.get(HBASE_ENCODING_SCHEME, DEFAULT_ENCODING);
     String compression = metricsConf.get(HBASE_COMPRESSION_SCHEME, DEFAULT_TABLE_COMPRESSION);
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/TimelineMetricSplitPointComputer.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/TimelineMetricSplitPointComputer.java
index 89bb843..05366cc 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/TimelineMetricSplitPointComputer.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/TimelineMetricSplitPointComputer.java
@@ -49,6 +49,11 @@ public class TimelineMetricSplitPointComputer {
   private static final int SLAVE_EQUIDISTANT_POINTS = 50;
   private static final int MASTER_EQUIDISTANT_POINTS = 5;
 
+  private double hbaseTotalHeapsize;
+  private double hbaseMemstoreUpperLimit;
+  private double hbaseMemstoreFlushSize;
+  private TimelineMetricMetadataManager timelineMetricMetadataManager = null;
+
   private List<byte[]> precisionSplitPoints = new ArrayList<>();
   private List<byte[]> aggregateSplitPoints = new ArrayList<>();
 
@@ -66,18 +71,14 @@ public class TimelineMetricSplitPointComputer {
       slaveComponents.addAll(Arrays.asList(componentsString.split(",")));
     }
 
-    double hbaseTotalHeapsize = metricsConf.getDouble("hbase_total_heapsize", 1024*1024*1024);
-    double hbaseMemstoreUpperLimit = hbaseConf.getDouble("hbase.regionserver.global.memstore.upperLimit", 0.5);
-    double hbaseMemstoreFlushSize = hbaseConf.getDouble("hbase.hregion.memstore.flush.size", 134217728);
-
-    computeSplitPoints(hbaseTotalHeapsize, hbaseMemstoreUpperLimit, hbaseMemstoreFlushSize, timelineMetricMetadataManager);
+    this.timelineMetricMetadataManager = timelineMetricMetadataManager;
+    hbaseTotalHeapsize = metricsConf.getDouble("hbase_total_heapsize", 1024*1024*1024);
+    hbaseMemstoreUpperLimit = hbaseConf.getDouble("hbase.regionserver.global.memstore.upperLimit", 0.3);
+    hbaseMemstoreFlushSize = hbaseConf.getDouble("hbase.hregion.memstore.flush.size", 134217728);
   }
 
 
-  private void computeSplitPoints(double hbaseTotalHeapsize,
-                                  double hbaseMemstoreUpperLimit,
-                                  double hbaseMemstoreFlushSize,
-                                  TimelineMetricMetadataManager timelineMetricMetadataManager) {
+  protected void computeSplitPoints() {
 
     double memstoreMaxMemory = hbaseMemstoreUpperLimit * hbaseTotalHeapsize;
     int maxInMemoryRegions = (int) ((memstoreMaxMemory / hbaseMemstoreFlushSize) - OTHER_TABLE_STATIC_REGIONS);
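
Editorial note: for reference, a standalone sketch of the memstore-based sizing used above, with the
configuration defaults from this hunk (1 GB total heap, 0.3 memstore upper limit, 128 MB flush size).
OTHER_TABLE_STATIC_REGIONS is a class constant whose value is not part of this diff, so it is kept as a
parameter here:

    // Sketch of the region-count estimate in computeSplitPoints(); not the full implementation.
    static int estimateMaxInMemoryRegions(double totalHeapBytes,
                                          double memstoreUpperLimit,
                                          double memstoreFlushSizeBytes,
                                          int otherTableStaticRegions) {
      // Memory available to memstores on the AMS HBase region server.
      double memstoreMaxMemory = memstoreUpperLimit * totalHeapBytes;
      // Regions that can flush comfortably, minus regions reserved for the other AMS tables.
      return (int) ((memstoreMaxMemory / memstoreFlushSizeBytes) - otherTableStaticRegions);
    }

    // With the defaults above: 0.3 * 1073741824 / 134217728 ≈ 2.4 regions before the
    // static-region deduction; larger heaps (see the tests below) scale this up linearly.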
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/discovery/TimelineMetricMetadataManager.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/discovery/TimelineMetricMetadataManager.java
index 737c2ff..86226ec 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/discovery/TimelineMetricMetadataManager.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/discovery/TimelineMetricMetadataManager.java
@@ -397,18 +397,6 @@ public class TimelineMetricMetadataManager {
       }
     }
 
-    if (!HOSTED_APPS_MAP.isEmpty()) {
-      Map.Entry<String, TimelineMetricHostMetadata> entry = HOSTED_APPS_MAP.entrySet().iterator().next();
-      TimelineMetricHostMetadata timelineMetricHostMetadata = entry.getValue();
-      if (timelineMetricHostMetadata.getUuid() != null  && timelineMetricHostMetadata.getUuid().length == 16) {
-        HOSTNAME_UUID_LENGTH = 16;
-        uuidGenStrategy = new MD5UuidGenStrategy();
-      } else {
-        HOSTNAME_UUID_LENGTH = 4;
-        uuidGenStrategy = new Murmur3HashUuidGenStrategy();
-      }
-    }
-
     for (String host : HOSTED_APPS_MAP.keySet()) {
       TimelineMetricHostMetadata timelineMetricHostMetadata = HOSTED_APPS_MAP.get(host);
       if (timelineMetricHostMetadata != null && timelineMetricHostMetadata.getUuid() != null) {
@@ -423,13 +411,7 @@ public class TimelineMetricMetadataManager {
    * @return the UUID generator of type org.apache.ambari.metrics.core.timeline.uuid.MetricUuidGenStrategy
    */
   private MetricUuidGenStrategy getUuidStrategy(Configuration configuration) {
-    String strategy = configuration.get(TIMELINE_METRICS_UUID_GEN_STRATEGY, "");
-    if ("md5".equalsIgnoreCase(strategy)){
-      return new MD5UuidGenStrategy();
-    } else {
-      //Default
-      return new Murmur3HashUuidGenStrategy();
-    }
+    return new Murmur3HashUuidGenStrategy();
   }
 
   /**
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/uuid/Murmur3HashUuidGenStrategy.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/uuid/Murmur3HashUuidGenStrategy.java
index 9418aa4..af8cee5 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/uuid/Murmur3HashUuidGenStrategy.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/core/timeline/uuid/Murmur3HashUuidGenStrategy.java
@@ -24,7 +24,12 @@ import org.apache.commons.lang.StringUtils;
 
 public class Murmur3HashUuidGenStrategy implements MetricUuidGenStrategy{
 
-  @Override
+  /**
+   * Compute Murmur3Hash 16 byte UUID for a Metric-App-Instance.
+   * @param timelineClusterMetric input metric
+   * @param maxLength Max length of returned UUID. (Will always be 16 for this technique)
+   * @return 16 byte UUID.
+   */  @Override
   public byte[] computeUuid(TimelineClusterMetric timelineClusterMetric, int maxLength) {
 
     String metricString = timelineClusterMetric.getMetricName() + timelineClusterMetric.getAppId();
@@ -35,6 +40,12 @@ public class Murmur3HashUuidGenStrategy implements MetricUuidGenStrategy{
     return Hashing.murmur3_128().hashBytes(metricBytes).asBytes();
   }
 
+  /**
+   * Compute Murmur3Hash 4 byte UUID for a String.
+   * @param value String input
+   * @param maxLength Max length of returned UUID. (Will always be 4 for this technique)
+   * @return 4 byte UUID.
+   */
   @Override
   public byte[] computeUuid(String value, int maxLength) {
     byte[] valueBytes = value.getBytes();
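
Editorial note: as the new javadoc above states, this strategy always yields 16-byte metric UUIDs and
4-byte string (hostname) UUIDs regardless of maxLength. A self-contained sketch of the same idea using
Guava's Hashing utilities; the 4-byte murmur3_32 variant is an assumption, since the hunk is truncated
before the hash call in computeUuid(String, int):

    import com.google.common.hash.Hashing;

    public final class Murmur3UuidSketch {

      // 16-byte UUID for a metric, mirroring the murmur3_128 call visible in the hunk.
      // (The real method may also fold in the instance id, as the StringUtils import
      // suggests, but that part is not shown above.)
      static byte[] metricUuid(String metricName, String appId) {
        byte[] metricBytes = (metricName + appId).getBytes();
        return Hashing.murmur3_128().hashBytes(metricBytes).asBytes();    // always 16 bytes
      }

      // 4-byte UUID for an arbitrary string such as a hostname.
      static byte[] stringUuid(String value) {
        return Hashing.murmur3_32().hashBytes(value.getBytes()).asBytes(); // always 4 bytes
      }
    }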
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/ambari/metrics/core/timeline/TimelineMetricSplitPointComputerTest.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/ambari/metrics/core/timeline/TimelineMetricSplitPointComputerTest.java
index 4d663cc..150dac2 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/ambari/metrics/core/timeline/TimelineMetricSplitPointComputerTest.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/ambari/metrics/core/timeline/TimelineMetricSplitPointComputerTest.java
@@ -51,7 +51,7 @@ public class TimelineMetricSplitPointComputerTest {
     expect(metricsConfMock.getDouble("hbase_total_heapsize", 1024*1024*1024)).andReturn(1024 * 1024 * 1024.0).once();
 
     Configuration hbaseConfMock = EasyMock.createMock(Configuration.class);
-    expect(hbaseConfMock.getDouble("hbase.regionserver.global.memstore.upperLimit", 0.5)).andReturn(0.5).once();
+    expect(hbaseConfMock.getDouble("hbase.regionserver.global.memstore.upperLimit", 0.3)).andReturn(0.3).once();
     expect(hbaseConfMock.getDouble("hbase.hregion.memstore.flush.size", 134217728)).andReturn(134217728.0).once();
 
     TimelineMetricMetadataManager metricMetadataManagerMock = EasyMock.createNiceMock(TimelineMetricMetadataManager.class);
@@ -62,6 +62,7 @@ public class TimelineMetricSplitPointComputerTest {
     TimelineMetricSplitPointComputer timelineMetricSplitPointComputer = new TimelineMetricSplitPointComputer(metricsConfMock,
       hbaseConfMock,
       metricMetadataManagerMock);
+    timelineMetricSplitPointComputer.computeSplitPoints();
 
     Assert.assertEquals(timelineMetricSplitPointComputer.getPrecisionSplitPoints().size(), 3);
     Assert.assertEquals(timelineMetricSplitPointComputer.getClusterAggregateSplitPoints().size(), 1);
@@ -85,7 +86,7 @@ public class TimelineMetricSplitPointComputerTest {
     expect(metricsConfMock.getDouble("hbase_total_heapsize", 1024*1024*1024)).andReturn(8589934592.0).once();
 
     Configuration hbaseConfMock = EasyMock.createMock(Configuration.class);
-    expect(hbaseConfMock.getDouble("hbase.regionserver.global.memstore.upperLimit", 0.5)).andReturn(0.5).once();
+    expect(hbaseConfMock.getDouble("hbase.regionserver.global.memstore.upperLimit", 0.3)).andReturn(0.3).once();
     expect(hbaseConfMock.getDouble("hbase.hregion.memstore.flush.size", 134217728)).andReturn(134217728.0).once();
 
     TimelineMetricMetadataManager metricMetadataManagerMock = EasyMock.createNiceMock(TimelineMetricMetadataManager.class);
@@ -96,10 +97,11 @@ public class TimelineMetricSplitPointComputerTest {
     TimelineMetricSplitPointComputer timelineMetricSplitPointComputer = new TimelineMetricSplitPointComputer(metricsConfMock,
       hbaseConfMock,
       metricMetadataManagerMock);
+    timelineMetricSplitPointComputer.computeSplitPoints();
 
-    Assert.assertEquals(timelineMetricSplitPointComputer.getPrecisionSplitPoints().size(), 16);
-    Assert.assertEquals(timelineMetricSplitPointComputer.getClusterAggregateSplitPoints().size(), 3);
-    Assert.assertEquals(timelineMetricSplitPointComputer.getHostAggregateSplitPoints().size(), 3);
+    Assert.assertEquals(timelineMetricSplitPointComputer.getPrecisionSplitPoints().size(), 6);
+    Assert.assertEquals(timelineMetricSplitPointComputer.getClusterAggregateSplitPoints().size(), 1);
+    Assert.assertEquals(timelineMetricSplitPointComputer.getHostAggregateSplitPoints().size(), 1);
   }
 
   @Test
@@ -119,7 +121,7 @@ public class TimelineMetricSplitPointComputerTest {
     expect(metricsConfMock.getDouble("hbase_total_heapsize", 1024*1024*1024)).andReturn(24 * 1024 * 1024 * 1024.0).once();
 
     Configuration hbaseConfMock = EasyMock.createMock(Configuration.class);
-    expect(hbaseConfMock.getDouble("hbase.regionserver.global.memstore.upperLimit", 0.5)).andReturn(0.5).once();
+    expect(hbaseConfMock.getDouble("hbase.regionserver.global.memstore.upperLimit", 0.3)).andReturn(0.3).once();
     expect(hbaseConfMock.getDouble("hbase.hregion.memstore.flush.size", 134217728)).andReturn(2 * 134217728.0).once();
 
     TimelineMetricMetadataManager metricMetadataManagerMock = EasyMock.createNiceMock(TimelineMetricMetadataManager.class);
@@ -130,9 +132,10 @@ public class TimelineMetricSplitPointComputerTest {
     TimelineMetricSplitPointComputer timelineMetricSplitPointComputer = new TimelineMetricSplitPointComputer(metricsConfMock,
       hbaseConfMock,
       metricMetadataManagerMock);
+    timelineMetricSplitPointComputer.computeSplitPoints();
 
-    Assert.assertEquals(timelineMetricSplitPointComputer.getPrecisionSplitPoints().size(), 28);
-    Assert.assertEquals(timelineMetricSplitPointComputer.getClusterAggregateSplitPoints().size(), 6);
-    Assert.assertEquals(timelineMetricSplitPointComputer.getHostAggregateSplitPoints().size(), 6);
+    Assert.assertEquals(timelineMetricSplitPointComputer.getPrecisionSplitPoints().size(), 14);
+    Assert.assertEquals(timelineMetricSplitPointComputer.getClusterAggregateSplitPoints().size(), 3);
+    Assert.assertEquals(timelineMetricSplitPointComputer.getHostAggregateSplitPoints().size(), 3);
   }
 }

-- 
To stop receiving notification emails like this one, please contact
avijayan@apache.org.