Posted to commits@ambari.apache.org by av...@apache.org on 2016/04/12 01:37:32 UTC

[1/2] ambari git commit: AMBARI-15749 : Add AMS post processing function to show the metrics 'diff' over time (avijayan)

Repository: ambari
Updated Branches:
  refs/heads/trunk 128f26ce9 -> 45bac47f8


AMBARI-15749 : Add AMS post processing function to show the metrics 'diff' over time (avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/45bac47f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/45bac47f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/45bac47f

Branch: refs/heads/trunk
Commit: 45bac47f8980cef1871155b5b3dd201c3055bbcd
Parents: 602fb5d
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Mon Apr 11 16:23:21 2016 -0700
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Mon Apr 11 16:37:19 2016 -0700

----------------------------------------------------------------------
 .../timeline/HBaseTimelineMetricStore.java      | 14 ++++++++-----
 .../metrics/timeline/aggregators/Function.java  |  3 ++-
 .../metrics/timeline/FunctionTest.java          | 10 ++++++++++
 .../timeline/HBaseTimelineMetricStoreTest.java  | 21 +++++++++++++++++++-
 4 files changed, 41 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/45bac47f/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
index a5204e1..ab11333 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
@@ -42,6 +42,7 @@ import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -211,30 +212,33 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin
     for (TimelineMetric metric : metricsList){
       String name = metric.getMetricName();
       if (name.contains("._rate")){
-        updateValuesAsRate(metric.getMetricValues());
+        updateValuesAsRate(metric.getMetricValues(), false);
+      } else if (name.contains("._diff")) {
+        updateValuesAsRate(metric.getMetricValues(), true);
       }
     }
 
     return metrics;
   }
 
-  static Map<Long, Double> updateValuesAsRate(Map<Long, Double> metricValues) {
+  static Map<Long, Double> updateValuesAsRate(Map<Long, Double> metricValues, boolean isDiff) {
     Long prevTime = null;
     Double prevVal = null;
     long step;
     Double diff;
 
-    for (Map.Entry<Long, Double> timeValueEntry : metricValues.entrySet()) {
+    for(Iterator<Map.Entry<Long, Double>> it = metricValues.entrySet().iterator(); it.hasNext(); ) {
+      Map.Entry<Long, Double> timeValueEntry = it.next();
       Long currTime = timeValueEntry.getKey();
       Double currVal = timeValueEntry.getValue();
 
       if (prevTime != null) {
         step = currTime - prevTime;
         diff = currVal - prevVal;
-        Double rate = diff / TimeUnit.MILLISECONDS.toSeconds(step);
+        Double rate = isDiff ? diff : (diff / TimeUnit.MILLISECONDS.toSeconds(step));
         timeValueEntry.setValue(rate);
       } else {
-        timeValueEntry.setValue(0.0);
+        it.remove();
       }
 
       prevTime = currTime;

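In the hunk above, updateValuesAsRate gains an isDiff flag: when true it stores the raw difference between consecutive data points instead of dividing that difference by the elapsed seconds, and the first point of the series (which has no predecessor) is now removed rather than reported as 0.0. A minimal standalone sketch of that logic, assuming a sorted map of timestamp-to-value pairs (illustrative only, not the production HBaseTimelineMetricStore class):

import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;

public class RateDiffSketch {

  // Mirrors the post-processing above: isDiff=true keeps the raw delta between
  // consecutive points; isDiff=false divides the delta by the elapsed seconds.
  // The first point has no predecessor, so it is dropped from the series.
  static Map<Long, Double> updateValuesAsRate(Map<Long, Double> metricValues, boolean isDiff) {
    Long prevTime = null;
    Double prevVal = null;
    for (Iterator<Map.Entry<Long, Double>> it = metricValues.entrySet().iterator(); it.hasNext(); ) {
      Map.Entry<Long, Double> entry = it.next();
      Long currTime = entry.getKey();
      Double currVal = entry.getValue();
      if (prevTime != null) {
        long step = currTime - prevTime;
        double diff = currVal - prevVal;
        entry.setValue(isDiff ? diff : diff / TimeUnit.MILLISECONDS.toSeconds(step));
      } else {
        it.remove(); // no previous sample to diff against
      }
      prevTime = currTime;
      prevVal = currVal;
    }
    return metricValues;
  }

  public static void main(String[] args) {
    Map<Long, Double> values = new TreeMap<>();
    values.put(1454016368371L, 1011.25);
    values.put(1454016428371L, 1010.25); // 60s later, delta -1.0
    values.put(1454016488371L, 1012.25); // 60s later, delta +2.0
    // _diff keeps {-1.0, 2.0}; _rate would yield {-1.0/60, 2.0/60}
    System.out.println(updateValuesAsRate(values, true));
  }
}
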
http://git-wip-us.apache.org/repos/asf/ambari/blob/45bac47f/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/Function.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/Function.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/Function.java
index 6f408a5..ab9d2e9 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/Function.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/Function.java
@@ -125,7 +125,8 @@ public class Function {
 
   public enum PostProcessingFunction {
     NONE(""),
-    RATE("._rate");
+    RATE("._rate"),
+    DIFF("._diff");
 
     PostProcessingFunction(String suffix){
       this.suffix = suffix;

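The new DIFF constant gives "._diff" the same status as "._rate" as a post-processing suffix on requested metric names. A self-contained sketch of that suffix contract follows; the fromSuffix helper and the example metric name are illustrative assumptions and not part of the actual Function class:

public class PostProcessingSketch {

  public enum PostProcessingFunction {
    NONE(""),
    RATE("._rate"),
    DIFF("._diff");

    private final String suffix;

    PostProcessingFunction(String suffix) {
      this.suffix = suffix;
    }

    public String getSuffix() {
      return suffix;
    }
  }

  // Hypothetical helper: find which post-processing suffix a requested
  // metric name carries, defaulting to NONE.
  static PostProcessingFunction fromSuffix(String metricName) {
    for (PostProcessingFunction f : PostProcessingFunction.values()) {
      if (!f.getSuffix().isEmpty() && metricName.contains(f.getSuffix())) {
        return f;
      }
    }
    return PostProcessingFunction.NONE;
  }

  public static void main(String[] args) {
    System.out.println(fromSuffix("regionserver.Server.totalRequestCount._diff._avg")); // DIFF
    System.out.println(fromSuffix("regionserver.Server.totalRequestCount"));            // NONE
  }
}
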
http://git-wip-us.apache.org/repos/asf/ambari/blob/45bac47f/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/FunctionTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/FunctionTest.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/FunctionTest.java
index 46bc6f8..188f634 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/FunctionTest.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/FunctionTest.java
@@ -24,6 +24,7 @@ import org.junit.Test;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.Function.fromMetricName;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.Function.ReadFunction.AVG;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.Function.PostProcessingFunction.RATE;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.Function.PostProcessingFunction.DIFF;
 import static org.assertj.core.api.Assertions.assertThat;
 
 public class FunctionTest {
@@ -42,6 +43,15 @@ public class FunctionTest {
     // Rate support without aggregates
     f = fromMetricName("Metric._rate");
     assertThat(f).isEqualTo(new Function(null, RATE));
+
+    // Diff support
+    f = fromMetricName("Metric._diff._avg");
+    assertThat(f).isEqualTo(new Function(AVG, DIFF));
+
+    // Diff support without aggregates
+    f = fromMetricName("Metric._diff");
+    assertThat(f).isEqualTo(new Function(null, DIFF));
+
   }
 
   @Ignore // If unknown function: behavior is best effort query without function

http://git-wip-us.apache.org/repos/asf/ambari/blob/45bac47f/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStoreTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStoreTest.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStoreTest.java
index 512a7db..29e2664 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStoreTest.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStoreTest.java
@@ -23,9 +23,11 @@ import org.junit.Test;
 
 import java.util.Arrays;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
+import java.util.concurrent.TimeUnit;
 
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.Function.ReadFunction.AVG;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.Function.PostProcessingFunction.RATE;
@@ -78,7 +80,7 @@ public class HBaseTimelineMetricStoreTest {
     metricValues.put(1454016728371L, 1011.25);
 
     // Calculate rate
-    Map<Long, Double> rates = HBaseTimelineMetricStore.updateValuesAsRate(new TreeMap<>(metricValues));
+    Map<Long, Double> rates = HBaseTimelineMetricStore.updateValuesAsRate(new TreeMap<>(metricValues), false);
 
     // Make sure rate is zero
     for (Map.Entry<Long, Double> rateEntry : rates.entrySet()) {
@@ -86,4 +88,21 @@ public class HBaseTimelineMetricStoreTest {
           + ", value = " + rateEntry.getValue(), 0.0, rateEntry.getValue());
     }
   }
+
+  @Test
+  public void testDiffCalculation() throws Exception {
+    Map<Long, Double> metricValues = new TreeMap<>();
+    metricValues.put(1454016368371L, 1011.25);
+    metricValues.put(1454016428371L, 1010.25);
+    metricValues.put(1454016488371L, 1012.25);
+    metricValues.put(1454016548371L, 1010.25);
+    metricValues.put(1454016608371L, 1010.25);
+
+    Map<Long, Double> rates = HBaseTimelineMetricStore.updateValuesAsRate(new TreeMap<>(metricValues), true);
+
+    Assert.assertTrue(rates.size()==4);
+    Assert.assertTrue(rates.containsValue(-1.0));
+    Assert.assertTrue(rates.containsValue(2.0));
+    Assert.assertTrue(rates.containsValue(0.0));
+  }
 }


[2/2] ambari git commit: AMBARI-15694 : AMS returns truncated results when it exceeds the metrics service default result limit config (avijayan)

Posted by av...@apache.org.
AMBARI-15694 : AMS returns truncated results when it exceeds the metrics service default result limit config (avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/602fb5d2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/602fb5d2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/602fb5d2

Branch: refs/heads/trunk
Commit: 602fb5d2058e241b99eb98f9ecfb88332a6c75af
Parents: 128f26c
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Mon Apr 11 16:19:18 2016 -0700
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Mon Apr 11 16:37:19 2016 -0700

----------------------------------------------------------------------
 .../timeline/query/PhoenixTransactSQL.java      |  17 ++-
 .../timeline/TestPhoenixTransactSQL.java        | 132 +++++++++++++++++++
 2 files changed, 144 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/602fb5d2/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixTransactSQL.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixTransactSQL.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixTransactSQL.java
index c8cef27..0efa68f 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixTransactSQL.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixTransactSQL.java
@@ -28,6 +28,7 @@ import java.sql.PreparedStatement;
 import java.sql.SQLException;
 import java.util.Arrays;
 import java.util.HashSet;
+import java.util.List;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
@@ -484,17 +485,23 @@ public class PhoenixTransactSQL {
         rowsPerMetric = TimeUnit.MILLISECONDS.toHours(range);
         break;
       case MINUTES:
-        rowsPerMetric = TimeUnit.MILLISECONDS.toMinutes(range)/2; //2 minute data in METRIC_AGGREGATE_MINUTE table.
+        rowsPerMetric = TimeUnit.MILLISECONDS.toMinutes(range)/5; //5 minute data in METRIC_AGGREGATE_MINUTE table.
         break;
       default:
         rowsPerMetric = TimeUnit.MILLISECONDS.toSeconds(range)/10; //10 second data in METRIC_AGGREGATE table
     }
 
-    long totalRowsRequested = rowsPerMetric * condition.getMetricNames().size();
+    List<String> hostNames = condition.getHostnames();
+    int numHosts = (hostNames == null || hostNames.isEmpty()) ? 1 : condition.getHostnames().size();
+
+    long totalRowsRequested = rowsPerMetric * condition.getMetricNames().size() * numHosts;
+
     if (totalRowsRequested > PhoenixHBaseAccessor.RESULTSET_LIMIT) {
-      throw new PrecisionLimitExceededException("Requested precision (" + precision + ") for given time range causes " +
-        "result set size of " + totalRowsRequested + ", which exceeds the limit - "
-        + PhoenixHBaseAccessor.RESULTSET_LIMIT + ". Please request higher precision.");
+      throw new PrecisionLimitExceededException("Requested " +  condition.getMetricNames().size() + " metrics for "
+        + numHosts + " hosts in " + precision +  " precision for the time range of " + range/1000
+        + " seconds. Estimated resultset size of " + totalRowsRequested + " is greater than the limit of "
+        + PhoenixHBaseAccessor.RESULTSET_LIMIT + ". Request lower precision or fewer number of metrics or hosts." +
+        " Alternatively, increase the limit value through ams-site:timeline.metrics.service.default.result.limit config");
     }
   }
 

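The hunk above changes the row estimate in two ways: MINUTES precision now assumes one aggregate row per 5 minutes instead of one per 2 minutes, and the estimate is multiplied by the number of requested hosts before being compared against PhoenixHBaseAccessor.RESULTSET_LIMIT. The error message now reports the metric and host counts, precision, and time range, and points at ams-site:timeline.metrics.service.default.result.limit. A back-of-the-envelope sketch of the estimate, reproducing the row counts exercised by the test below (the default limit of 15840 is an assumption inferred from the test values):

import java.util.concurrent.TimeUnit;

public class ResultSetEstimateSketch {

  // Illustrative re-statement of the estimate above; the real check lives in
  // PhoenixTransactSQL and reads the limit from PhoenixHBaseAccessor.RESULTSET_LIMIT.
  static long estimateRows(long rangeMillis, String precision, int numMetrics, int numHosts) {
    long rowsPerMetric;
    switch (precision) {
      case "HOURS":
        rowsPerMetric = TimeUnit.MILLISECONDS.toHours(rangeMillis);
        break;
      case "MINUTES":
        rowsPerMetric = TimeUnit.MILLISECONDS.toMinutes(rangeMillis) / 5; // 5-minute aggregate rows
        break;
      default:
        rowsPerMetric = TimeUnit.MILLISECONDS.toSeconds(rangeMillis) / 10; // 10-second rows
    }
    return rowsPerMetric * numMetrics * Math.max(numHosts, 1); // no hosts -> cluster-level aggregate
  }

  public static void main(String[] args) {
    long oneHourMs = TimeUnit.HOURS.toMillis(1);
    // 22 metrics x 2 hosts x 1 hour of 10-second data = 360 * 22 * 2 = 15840 rows (within limit)
    System.out.println(estimateRows(oneHourMs, "SECONDS", 22, 2));
    // 10 metrics x 5 hosts x 1 hour of 10-second data = 360 * 10 * 5 = 18000 rows,
    // which trips the limit check (assumed default limit: 15840)
    System.out.println(estimateRows(oneHourMs, "SECONDS", 10, 5));
  }
}
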
http://git-wip-us.apache.org/repos/asf/ambari/blob/602fb5d2/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java
index 6bf15c7..9c6617c 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
 
 import org.apache.hadoop.metrics2.sink.timeline.Precision;
+import org.apache.hadoop.metrics2.sink.timeline.PrecisionLimitExceededException;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.Condition;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.DefaultCondition;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL;
@@ -29,8 +30,11 @@ import java.sql.Connection;
 import java.sql.ParameterMetaData;
 import java.sql.PreparedStatement;
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.List;
+
 import static org.easymock.EasyMock.createNiceMock;
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.replay;
@@ -422,4 +426,132 @@ public class TestPhoenixTransactSQL {
     Assert.assertTrue(stmt.contains("FROM METRIC_RECORD_HOURLY"));
     verify(connection, preparedStatement);
   }
+
+  @Test
+  public void testResultSetLimitCheck() throws SQLException {
+
+    List<String> metrics = new ArrayList<String>();
+    List<String> hosts = new ArrayList<String>();
+    int numMetrics = 0;
+    int numHosts = 0;
+    int limit = PhoenixHBaseAccessor.RESULTSET_LIMIT;
+
+    // 22 Metrics x 2 Hosts x 1 hour with requested SECONDS precision = 15840 points. Should be OK!
+    numMetrics = 22;
+    numHosts = 2;
+    for (int i = 0; i < numMetrics; i++) {
+      metrics.add("TestMetric"+i);
+    }
+    for (int i = 0; i < numHosts; i++) {
+      hosts.add("TestHost"+i);
+    }
+
+    Condition condition = new DefaultCondition(
+      metrics, hosts,
+      "a1", "i1", 1407950000L, 1407953600L, Precision.SECONDS, null, false);
+    Connection connection = createNiceMock(Connection.class);
+    PreparedStatement preparedStatement = createNiceMock(PreparedStatement.class);
+    Capture<String> stmtCapture = new Capture<String>();
+    expect(connection.prepareStatement(EasyMock.and(EasyMock.anyString(), EasyMock.capture(stmtCapture))))
+      .andReturn(preparedStatement);
+
+    replay(connection, preparedStatement);
+    PhoenixTransactSQL.prepareGetMetricsSqlStmt(connection, condition);
+    String stmt = stmtCapture.getValue();
+    Assert.assertTrue(stmt.contains("FROM METRIC_RECORD"));
+    verify(connection, preparedStatement);
+
+    //Check without passing precision. Should be OK!
+    condition = new DefaultCondition(
+      metrics, hosts,
+      "a1", "i1", 1407950000L, 1407953600L, null, null, false);
+    connection = createNiceMock(Connection.class);
+    preparedStatement = createNiceMock(PreparedStatement.class);
+    stmtCapture = new Capture<String>();
+    expect(connection.prepareStatement(EasyMock.and(EasyMock.anyString(), EasyMock.capture(stmtCapture))))
+      .andReturn(preparedStatement);
+
+    replay(connection, preparedStatement);
+    PhoenixTransactSQL.prepareGetMetricsSqlStmt(connection, condition);
+    stmt = stmtCapture.getValue();
+    Assert.assertTrue(stmt.contains("FROM METRIC_RECORD"));
+    verify(connection, preparedStatement);
+
+    //Check with more hosts and lesser metrics for 1 day data = 11520 points Should be OK!
+    metrics.clear();
+    hosts.clear();
+    numMetrics = 2;
+    numHosts = 20;
+    for (int i = 0; i < numMetrics; i++) {
+      metrics.add("TestMetric"+i);
+    }
+    for (int i = 0; i < numHosts; i++) {
+      hosts.add("TestHost"+i);
+    }
+    condition = new DefaultCondition(
+      metrics, hosts,
+      "a1", "i1", 1407867200L, 1407953600L, null, null, false);
+    connection = createNiceMock(Connection.class);
+    preparedStatement = createNiceMock(PreparedStatement.class);
+    stmtCapture = new Capture<String>();
+    expect(connection.prepareStatement(EasyMock.and(EasyMock.anyString(), EasyMock.capture(stmtCapture))))
+      .andReturn(preparedStatement);
+
+    replay(connection, preparedStatement);
+    PhoenixTransactSQL.prepareGetMetricsSqlStmt(connection, condition);
+    stmt = stmtCapture.getValue();
+    Assert.assertTrue(stmt.contains("FROM METRIC_RECORD_MINUTE"));
+    verify(connection, preparedStatement);
+
+    //Check with 20 metrics, NO hosts and 1 day data = 5760 points. Should be OK!
+    metrics.clear();
+    hosts.clear();
+    numMetrics = 20;
+    for (int i = 0; i < numMetrics; i++) {
+      metrics.add("TestMetric"+i);
+    }
+    condition = new DefaultCondition(
+      metrics, hosts,
+      "a1", "i1", 1407867200L, 1407953600L, null, null, false);
+    connection = createNiceMock(Connection.class);
+    preparedStatement = createNiceMock(PreparedStatement.class);
+    stmtCapture = new Capture<String>();
+    expect(connection.prepareStatement(EasyMock.and(EasyMock.anyString(), EasyMock.capture(stmtCapture))))
+      .andReturn(preparedStatement);
+
+    replay(connection, preparedStatement);
+    PhoenixTransactSQL.prepareGetAggregateSqlStmt(connection, condition);
+    stmt = stmtCapture.getValue();
+    Assert.assertTrue(stmt.contains("FROM METRIC_AGGREGATE_MINUTE"));
+    verify(connection, preparedStatement);
+
+    //Check with 5 hosts and 10 metrics for 1 hour data = 18000 points. Should throw out Exception!
+    metrics.clear();
+    hosts.clear();
+    numMetrics = 10;
+    numHosts = 5;
+    for (int i = 0; i < numMetrics; i++) {
+      metrics.add("TestMetric"+i);
+    }
+    for (int i = 0; i < numHosts; i++) {
+      hosts.add("TestHost"+i);
+    }
+    condition = new DefaultCondition(
+      metrics, hosts,
+      "a1", "i1", 1407950000L, 1407953600L, null, null, false);
+    boolean exceptionThrown = false;
+    boolean requestedSizeFoundInMessage = false;
+
+    try {
+      PhoenixTransactSQL.prepareGetMetricsSqlStmt(connection, condition);
+    } catch (PrecisionLimitExceededException pe) {
+      exceptionThrown = true;
+      String message = pe.getMessage();
+      if (message !=null && message.contains("18000")) {
+        requestedSizeFoundInMessage = true;
+      }
+    }
+    Assert.assertTrue(exceptionThrown);
+    Assert.assertTrue(requestedSizeFoundInMessage);
+  }
 }