Posted to commits@ambari.apache.org by ds...@apache.org on 2015/09/10 11:46:34 UTC

ambari git commit: AMBARI-13049 AMS: IOException: maxStamp is smaller than minStamp (dsen)

Repository: ambari
Updated Branches:
  refs/heads/trunk 9659891e6 -> eccadf9ce


AMBARI-13049 AMS: IOException: maxStamp is smaller than minStamp (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/eccadf9c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/eccadf9c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/eccadf9c

Branch: refs/heads/trunk
Commit: eccadf9ced4290662808bc731d0812f991d9cf91
Parents: 9659891
Author: Dmytro Sen <ds...@apache.org>
Authored: Thu Sep 10 12:46:19 2015 +0300
Committer: Dmytro Sen <ds...@apache.org>
Committed: Thu Sep 10 12:46:19 2015 +0300

----------------------------------------------------------------------
 .../metrics/timeline/PhoenixHBaseAccessor.java  |  30 ++++-
 .../timeline/PhoenixHBaseAccessorTest.java      | 124 +++++++++++++++++++
 2 files changed, 150 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/eccadf9c/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
index e052abe..7dfd8a7 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
@@ -450,13 +450,35 @@ public class PhoenixHBaseAccessor {
       if(condition.isPointInTime()){
         getLatestMetricRecords(condition, conn, metrics);
       } else {
-        stmt = PhoenixTransactSQL.prepareGetMetricsSqlStmt(conn, condition);
-        rs = stmt.executeQuery();
-        while (rs.next()) {
-          appendMetricFromResultSet(metrics, condition, metricFunctions, rs);
+        if (condition.getEndTime() >= condition.getStartTime()) {
+          stmt = PhoenixTransactSQL.prepareGetMetricsSqlStmt(conn, condition);
+          rs = stmt.executeQuery();
+          while (rs.next()) {
+            appendMetricFromResultSet(metrics, condition, metricFunctions, rs);
+          }
+        } else {
+          LOG.warn("Skipping metrics query because endTime < startTime");
         }
       }
 
+    } catch (RuntimeException ex) {
+      // We need to find out if this is a real IO exception
+      // or the "maxStamp is smaller than minStamp" exception
+      // thrown in HBase's TimeRange.java
+      Throwable io = ex.getCause();
+      String className = null;
+      for (StackTraceElement ste : io.getStackTrace()) {
+        className = ste.getClassName();
+      }
+      if (className != null && className.equals("TimeRange")) {
+        // This is the "maxStamp is smaller than minStamp" exception.
+        // Log it at debug level and return empty metrics
+        LOG.debug(io);
+        return new TimelineMetrics();
+      } else {
+        throw ex;
+      }
+
     } finally {
       if (rs != null) {
         try {

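The catch block above classifies the failure by walking the cause's stack
trace and keeping the class name of the last frame it visits. A minimal
standalone sketch of that classification, using hypothetical names
(TimeRangeExceptionClassifier and isTimeRangeException are illustrative,
not part of this commit):

    import java.util.Objects;

    // Sketch of the classification done in the catch block above: decide
    // whether a RuntimeException wraps HBase's "maxStamp is smaller than
    // minStamp" failure raised in TimeRange, rather than a genuine I/O
    // problem that should be rethrown.
    final class TimeRangeExceptionClassifier {
      static boolean isTimeRangeException(RuntimeException ex) {
        Throwable cause = ex.getCause();   // guard: may be null for a bare RuntimeException
        if (cause == null) {
          return false;                    // nothing to classify; caller rethrows
        }
        String className = null;
        for (StackTraceElement ste : cause.getStackTrace()) {
          className = ste.getClassName();  // the loop leaves the last frame's class
        }
        return Objects.equals(className, "TimeRange");
      }
    }

Note that StackTraceElement.getClassName() returns a fully qualified name
(for HBase that is org.apache.hadoop.hbase.io.TimeRange), so an exact match
on the simple name "TimeRange" only fires for traces populated with the
simple name; the unit test below constructs exactly such a frame.
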
http://git-wip-us.apache.org/repos/asf/ambari/blob/eccadf9c/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessorTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessorTest.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessorTest.java
new file mode 100644
index 0000000..fea4b2f
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessorTest.java
@@ -0,0 +1,124 @@
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.Function;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.Condition;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.ConnectionProvider;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.DefaultCondition;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL;
+import org.easymock.EasyMock;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.powermock.api.easymock.PowerMock;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Unit tests for PhoenixHBaseAccessor#getMetricRecords.
+ */
+@RunWith(PowerMockRunner.class)
+@PrepareForTest(PhoenixTransactSQL.class)
+public class PhoenixHBaseAccessorTest {
+  private static final String ZOOKEEPER_QUORUM = "hbase.zookeeper.quorum";
+
+  @Test
+  public void testGetMetricRecords() throws SQLException, IOException {
+
+    Configuration hbaseConf = new Configuration();
+    hbaseConf.setStrings(ZOOKEEPER_QUORUM, "quorum");
+    Configuration metricsConf = new Configuration();
+
+    ConnectionProvider connectionProvider = new ConnectionProvider() {
+      @Override
+      public Connection getConnection() throws SQLException {
+        return null;
+      }
+    };
+
+    PhoenixHBaseAccessor accessor = new PhoenixHBaseAccessor(hbaseConf, metricsConf, connectionProvider);
+
+    List<String> metricNames = new LinkedList<>();
+    List<String> hostnames = new LinkedList<>();
+    Map<String, List<Function>> metricFunctions = new HashMap<>();
+
+    PowerMock.mockStatic(PhoenixTransactSQL.class);
+    PreparedStatement preparedStatementMock = EasyMock.createNiceMock(PreparedStatement.class);
+    Condition condition = new DefaultCondition(metricNames, hostnames, "appid", "instanceid", 123L, 234L, Precision.SECONDS, 10, true);
+    EasyMock.expect(PhoenixTransactSQL.prepareGetMetricsSqlStmt(null, condition)).andReturn(preparedStatementMock).once();
+    ResultSet rsMock = EasyMock.createNiceMock(ResultSet.class);
+    EasyMock.expect(preparedStatementMock.executeQuery()).andReturn(rsMock);
+
+
+    PowerMock.replayAll();
+    EasyMock.replay(preparedStatementMock, rsMock);
+
+    // Check when startTime < endTime
+    TimelineMetrics tml = accessor.getMetricRecords(condition, metricFunctions);
+
+    // Check when startTime > endTime
+    Condition condition2 = new DefaultCondition(metricNames, hostnames, "appid", "instanceid", 234L, 123L, Precision.SECONDS, 10, true);
+    TimelineMetrics tml2 = accessor.getMetricRecords(condition2, metricFunctions);
+    assertEquals(0, tml2.getMetrics().size());
+
+    PowerMock.verifyAll();
+    EasyMock.verify(preparedStatementMock, rsMock);
+  }
+
+  @Test
+  public void testGetMetricRecordsException() throws SQLException, IOException {
+
+    Configuration hbaseConf = new Configuration();
+    hbaseConf.setStrings(ZOOKEEPER_QUORUM, "quorum");
+    Configuration metricsConf = new Configuration();
+
+    ConnectionProvider connectionProvider = new ConnectionProvider() {
+      @Override
+      public Connection getConnection() throws SQLException {
+        return null;
+      }
+    };
+
+    PhoenixHBaseAccessor accessor = new PhoenixHBaseAccessor(hbaseConf, metricsConf, connectionProvider);
+
+    List<String> metricNames = new LinkedList<>();
+    List<String> hostnames = new LinkedList<>();
+    Map<String, List<Function>> metricFunctions = new HashMap<>();
+
+    PowerMock.mockStatic(PhoenixTransactSQL.class);
+    PreparedStatement preparedStatementMock = EasyMock.createNiceMock(PreparedStatement.class);
+    Condition condition = new DefaultCondition(metricNames, hostnames, "appid", "instanceid", 123L, 234L, Precision.SECONDS, 10, true);
+    EasyMock.expect(PhoenixTransactSQL.prepareGetMetricsSqlStmt(null, condition)).andReturn(preparedStatementMock).once();
+    ResultSet rsMock = EasyMock.createNiceMock(ResultSet.class);
+    RuntimeException runtimeException = EasyMock.createNiceMock(RuntimeException.class);
+    IOException io = EasyMock.createNiceMock(IOException.class);
+    EasyMock.expect(preparedStatementMock.executeQuery()).andThrow(runtimeException);
+    EasyMock.expect(runtimeException.getCause()).andReturn(io).atLeastOnce();
+    StackTraceElement stackTrace[] = new StackTraceElement[]{new StackTraceElement("TimeRange","method","file",1)};
+    EasyMock.expect(io.getStackTrace()).andReturn(stackTrace).atLeastOnce();
+
+
+    PowerMock.replayAll();
+    EasyMock.replay(preparedStatementMock, rsMock, io, runtimeException);
+
+    TimelineMetrics tml = accessor.getMetricRecords(condition, metricFunctions);
+
+    assertEquals(0, tml.getMetrics().size());
+
+    PowerMock.verifyAll();
+    EasyMock.verify(preparedStatementMock, rsMock, io, runtimeException);
+  }
+
+}
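
Both tests rely on the same PowerMock pattern for stubbing the static
PhoenixTransactSQL.prepareGetMetricsSqlStmt call. Reduced to its essentials
(the class name StaticStubSketch and its test method are illustrative, not
part of this commit; the stubbed method and annotations are the ones used
above):

    import static org.junit.Assert.assertSame;

    import java.sql.PreparedStatement;

    import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.Condition;
    import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL;
    import org.easymock.EasyMock;
    import org.junit.Test;
    import org.junit.runner.RunWith;
    import org.powermock.api.easymock.PowerMock;
    import org.powermock.core.classloader.annotations.PrepareForTest;
    import org.powermock.modules.junit4.PowerMockRunner;

    @RunWith(PowerMockRunner.class)            // PowerMock must load the test class
    @PrepareForTest(PhoenixTransactSQL.class)  // instrument the class owning the static method
    public class StaticStubSketch {
      @Test
      public void stubsStaticSqlPreparation() throws Exception {
        PowerMock.mockStatic(PhoenixTransactSQL.class);
        PreparedStatement stmt = EasyMock.createNiceMock(PreparedStatement.class);
        Condition condition = EasyMock.createNiceMock(Condition.class);

        // Record the expected static call, then flip every mock to replay mode.
        EasyMock.expect(PhoenixTransactSQL.prepareGetMetricsSqlStmt(null, condition))
            .andReturn(stmt).once();
        PowerMock.replayAll();
        EasyMock.replay(stmt, condition);

        // The static method is now stubbed: invoking it yields the recorded mock.
        assertSame(stmt, PhoenixTransactSQL.prepareGetMetricsSqlStmt(null, condition));

        PowerMock.verifyAll();
        EasyMock.verify(stmt, condition);
      }
    }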