You are viewing a plain text version of this content. The canonical (HTML) version is available at the original mailing-list archive.
Posted to common-commits@hadoop.apache.org by vr...@apache.org on 2016/06/21 23:49:20 UTC
[34/50] [abbrv] hadoop git commit: YARN-5015. entire time series is
returned for YARN container system metrics (CPU and memory) (Varun Saxena via
sjlee)
YARN-5015. entire time series is returned for YARN container system metrics (CPU and memory) (Varun Saxena via sjlee)
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fc78a937
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fc78a937
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fc78a937
Branch: refs/heads/YARN-2928
Commit: fc78a937d80ef01712382c563a7a0c3e53da6a24
Parents: c998a52
Author: Sangjin Lee <sj...@apache.org>
Authored: Tue May 31 13:09:59 2016 -0700
Committer: Vrushali <vr...@twitter.com>
Committed: Sun Jun 19 00:20:12 2016 -0700
----------------------------------------------------------------------
...stTimelineReaderWebServicesHBaseStorage.java | 211 +++++++++++--
.../storage/TestHBaseTimelineStorage.java | 315 ++++++++++++++-----
.../storage/flow/TestHBaseStorageFlowRun.java | 26 +-
.../reader/TimelineDataToRetrieve.java | 32 +-
.../reader/TimelineEntityFilters.java | 5 +-
.../reader/TimelineReaderWebServices.java | 274 ++++++++++++----
.../reader/TimelineReaderWebServicesUtils.java | 17 +-
.../storage/reader/ApplicationEntityReader.java | 3 +-
.../storage/reader/FlowRunEntityReader.java | 12 +
.../storage/reader/GenericEntityReader.java | 4 +-
.../storage/reader/TimelineEntityReader.java | 5 +-
.../TestFileSystemTimelineReaderImpl.java | 12 +-
12 files changed, 711 insertions(+), 205 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/fc78a937/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
index ca80ed5..f9f4607 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.yarn.server.timelineservice.reader;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
import java.io.IOException;
import java.lang.reflect.UndeclaredThrowableException;
@@ -112,13 +113,14 @@ public class TestTimelineReaderWebServicesHBaseStorage {
TimelineMetric m1 = new TimelineMetric();
m1.setId("MAP_SLOT_MILLIS");
Map<Long, Number> metricValues =
- ImmutableMap.of(ts - 100000, (Number)2, ts - 80000, 40);
+ ImmutableMap.of(ts - 100000, (Number)2, ts - 90000, 7, ts - 80000, 40);
m1.setType(Type.TIME_SERIES);
m1.setValues(metricValues);
metrics.add(m1);
m1 = new TimelineMetric();
m1.setId("MAP1_SLOT_MILLIS");
- metricValues = ImmutableMap.of(ts - 100000, (Number)2, ts - 80000, 40);
+ metricValues =
+ ImmutableMap.of(ts - 100000, (Number)2, ts - 90000, 9, ts - 80000, 40);
m1.setType(Type.TIME_SERIES);
m1.setValues(metricValues);
metrics.add(m1);
@@ -460,6 +462,7 @@ public class TestTimelineReaderWebServicesHBaseStorage {
assertNotNull(resp);
assertTrue("Response from server should have been " + status,
resp.getClientResponseStatus().equals(status));
+ System.out.println("Response is: " + resp.getEntity(String.class));
}
@Test
@@ -615,12 +618,18 @@ public class TestTimelineReaderWebServicesHBaseStorage {
(entity.getStartTime() == 1425016501034L) &&
(entity.getMetrics().size() == 1)));
}
+
+ // fields as CONFIGS will lead to a HTTP 400 as it makes no sense for
+ // flow runs.
+ uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
+ "timeline/clusters/cluster1/users/user1/flows/flow_name/runs?" +
+ "fields=CONFIGS");
+ verifyHttpResponse(client, uri, Status.BAD_REQUEST);
} finally {
client.destroy();
}
}
-
@Test
public void testGetFlowRunsMetricsToRetrieve() throws Exception {
Client client = createClient();
@@ -1024,15 +1033,12 @@ public class TestTimelineReaderWebServicesHBaseStorage {
assertNotNull(entity);
assertEquals("application_1111111111_1111", entity.getId());
assertEquals(3, entity.getMetrics().size());
- TimelineMetric m1 = newMetric(TimelineMetric.Type.TIME_SERIES,
- "HDFS_BYTES_READ", ts - 100000, 31L);
- m1.addValue(ts - 80000, 57L);
- TimelineMetric m2 = newMetric(TimelineMetric.Type.TIME_SERIES,
- "MAP_SLOT_MILLIS", ts - 100000, 2L);
- m2.addValue(ts - 80000, 40L);
- TimelineMetric m3 = newMetric(TimelineMetric.Type.TIME_SERIES,
- "MAP1_SLOT_MILLIS", ts - 100000, 2L);
- m3.addValue(ts - 80000, 40L);
+ TimelineMetric m1 = newMetric(TimelineMetric.Type.SINGLE_VALUE,
+ "HDFS_BYTES_READ", ts - 80000, 57L);
+ TimelineMetric m2 = newMetric(TimelineMetric.Type.SINGLE_VALUE,
+ "MAP_SLOT_MILLIS", ts - 80000, 40L);
+ TimelineMetric m3 = newMetric(TimelineMetric.Type.SINGLE_VALUE,
+ "MAP1_SLOT_MILLIS", ts - 80000, 40L);
for (TimelineMetric metric : entity.getMetrics()) {
assertTrue(verifyMetrics(metric, m1, m2, m3));
}
@@ -1045,9 +1051,8 @@ public class TestTimelineReaderWebServicesHBaseStorage {
assertNotNull(entity);
assertEquals("application_1111111111_2222", entity.getId());
assertEquals(1, entity.getMetrics().size());
- TimelineMetric m4 = newMetric(TimelineMetric.Type.TIME_SERIES,
- "MAP_SLOT_MILLIS", ts - 100000, 5L);
- m4.addValue(ts - 80000, 101L);
+ TimelineMetric m4 = newMetric(TimelineMetric.Type.SINGLE_VALUE,
+ "MAP_SLOT_MILLIS", ts - 80000, 101L);
for (TimelineMetric metric : entity.getMetrics()) {
assertTrue(verifyMetrics(metric, m4));
}
@@ -1067,15 +1072,35 @@ public class TestTimelineReaderWebServicesHBaseStorage {
TimelineEntity entity = resp.getEntity(TimelineEntity.class);
assertNotNull(entity);
assertEquals("application_1111111111_1111", entity.getId());
+ assertEquals(1, entity.getConfigs().size());
+ assertEquals(3, entity.getMetrics().size());
+ TimelineMetric m1 = newMetric(TimelineMetric.Type.SINGLE_VALUE,
+ "HDFS_BYTES_READ", ts - 80000, 57L);
+ TimelineMetric m2 = newMetric(TimelineMetric.Type.SINGLE_VALUE,
+ "MAP_SLOT_MILLIS", ts - 80000, 40L);
+ TimelineMetric m3 = newMetric(TimelineMetric.Type.SINGLE_VALUE,
+ "MAP1_SLOT_MILLIS", ts - 80000, 40L);
+ for (TimelineMetric metric : entity.getMetrics()) {
+ assertTrue(verifyMetrics(metric, m1, m2, m3));
+ }
+
+ uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
+ "timeline/clusters/cluster1/apps/application_1111111111_1111?" +
+ "fields=ALL&metricslimit=10");
+ resp = getResponse(client, uri);
+ entity = resp.getEntity(TimelineEntity.class);
+ assertNotNull(entity);
+ assertEquals("application_1111111111_1111", entity.getId());
+ assertEquals(1, entity.getConfigs().size());
assertEquals(3, entity.getMetrics().size());
- TimelineMetric m1 = newMetric(TimelineMetric.Type.TIME_SERIES,
- "HDFS_BYTES_READ", ts - 100000, 31L);
+ m1 = newMetric(TimelineMetric.Type.TIME_SERIES, "HDFS_BYTES_READ",
+ ts - 100000, 31L);
m1.addValue(ts - 80000, 57L);
- TimelineMetric m2 = newMetric(TimelineMetric.Type.TIME_SERIES,
- "MAP_SLOT_MILLIS", ts - 100000, 2L);
+ m2 = newMetric(TimelineMetric.Type.TIME_SERIES, "MAP_SLOT_MILLIS",
+ ts - 100000, 2L);
m2.addValue(ts - 80000, 40L);
- TimelineMetric m3 = newMetric(TimelineMetric.Type.TIME_SERIES,
- "MAP1_SLOT_MILLIS", ts - 100000, 2L);
+ m3 = newMetric(TimelineMetric.Type.TIME_SERIES, "MAP1_SLOT_MILLIS",
+ ts - 100000, 2L);
m3.addValue(ts - 80000, 40L);
for (TimelineMetric metric : entity.getMetrics()) {
assertTrue(verifyMetrics(metric, m1, m2, m3));
@@ -1229,11 +1254,6 @@ public class TestTimelineReaderWebServicesHBaseStorage {
}
}
assertEquals(2, metricCnt);
-
- uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
- "timeline/clusters/cluster1/apps/application_1111111111_1111/" +
- "entities/type1?metricstoretrieve=!(MAP1_,HDFS_");
- verifyHttpResponse(client, uri, Status.BAD_REQUEST);
} finally {
client.destroy();
}
@@ -1550,6 +1570,35 @@ public class TestTimelineReaderWebServicesHBaseStorage {
assertTrue(entity.getId().equals("entity2"));
for (TimelineMetric metric : entity.getMetrics()) {
assertTrue(metric.getId().startsWith("MAP1"));
+ assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric.getType());
+ }
+ }
+ assertEquals(2, metricCnt);
+
+ uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
+ "timeline/clusters/cluster1/apps/application_1111111111_1111/" +
+ "entities/type1?metricfilters=(HDFS_BYTES_READ%20lt%2060%20AND%20" +
+ "MAP_SLOT_MILLIS%20gt%2040)%20OR%20(MAP1_SLOT_MILLIS%20ge" +
+ "%20140%20AND%20MAP11_SLOT_MILLIS%20le%20122)&metricstoretrieve=" +
+ "!(HDFS)&metricslimit=10");
+ resp = getResponse(client, uri);
+ entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
+ assertNotNull(entities);
+ assertEquals(1, entities.size());
+ metricCnt = 0;
+ for (TimelineEntity entity : entities) {
+ metricCnt += entity.getMetrics().size();
+ assertTrue(entity.getId().equals("entity2"));
+ for (TimelineMetric metric : entity.getMetrics()) {
+ assertTrue(metric.getId().startsWith("MAP1"));
+ if (metric.getId().equals("MAP1_SLOT_MILLIS")) {
+ assertEquals(2, metric.getValues().size());
+ assertEquals(TimelineMetric.Type.TIME_SERIES, metric.getType());
+ } else if (metric.getId().equals("MAP11_SLOT_MILLIS")) {
+ assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric.getType());
+ } else {
+ fail("Unexpected metric id");
+ }
}
}
assertEquals(2, metricCnt);
@@ -1794,6 +1843,23 @@ public class TestTimelineReaderWebServicesHBaseStorage {
assertEquals(1, entity.getMetrics().size());
for (TimelineMetric metric : entity.getMetrics()) {
assertTrue(metric.getId().startsWith("MAP11_"));
+ assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric.getType());
+ assertEquals(1, metric.getValues().size());
+ }
+
+ uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
+ "timeline/clusters/cluster1/apps/application_1111111111_1111/" +
+ "entities/type1/entity2?metricstoretrieve=!(MAP1_,HDFS_)&" +
+ "metricslimit=5");
+ resp = getResponse(client, uri);
+ entity = resp.getEntity(TimelineEntity.class);
+ assertNotNull(entity);
+ assertEquals("entity2", entity.getId());
+ assertEquals("type1", entity.getType());
+ assertEquals(1, entity.getMetrics().size());
+ for (TimelineMetric metric : entity.getMetrics()) {
+ assertTrue(metric.getId().startsWith("MAP11_"));
+ assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric.getType());
}
} finally {
client.destroy();
@@ -1818,6 +1884,29 @@ public class TestTimelineReaderWebServicesHBaseStorage {
entity.getMetrics().size() == 3) ||
(entity.getId().equals("application_1111111111_2222") &&
entity.getMetrics().size() == 1));
+ for (TimelineMetric metric : entity.getMetrics()) {
+ assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric.getType());
+ assertEquals(1, metric.getValues().size());
+ }
+ }
+
+ uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
+ "timeline/clusters/cluster1/users/user1/flows/flow_name/runs/" +
+ "1002345678919/apps?fields=ALL&metricslimit=2");
+ resp = getResponse(client, uri);
+ entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
+ assertNotNull(entities);
+ assertEquals(2, entities.size());
+ for (TimelineEntity entity : entities) {
+ assertTrue("Unexpected app in result",
+ (entity.getId().equals("application_1111111111_1111") &&
+ entity.getMetrics().size() == 3) ||
+ (entity.getId().equals("application_1111111111_2222") &&
+ entity.getMetrics().size() == 1));
+ for (TimelineMetric metric : entity.getMetrics()) {
+ assertTrue(metric.getValues().size() <= 2);
+ assertEquals(TimelineMetric.Type.TIME_SERIES, metric.getType());
+ }
}
// Query without specifying cluster ID.
@@ -1855,11 +1944,75 @@ public class TestTimelineReaderWebServicesHBaseStorage {
for (TimelineEntity entity : entities) {
assertTrue("Unexpected app in result",
(entity.getId().equals("application_1111111111_1111") &&
- entity.getMetrics().size() == 3) ||
+ entity.getConfigs().size() == 1 &&
+ entity.getConfigs().equals(ImmutableMap.of("cfg2", "value1"))) ||
(entity.getId().equals("application_1111111111_2222") &&
- entity.getMetrics().size() == 1) ||
+ entity.getConfigs().size() == 1 &&
+ entity.getConfigs().equals(ImmutableMap.of("cfg1", "value1"))) ||
(entity.getId().equals("application_1111111111_2224") &&
- entity.getMetrics().size() == 1));
+ entity.getConfigs().size() == 0));
+ for (TimelineMetric metric : entity.getMetrics()) {
+ if (entity.getId().equals("application_1111111111_1111")) {
+ TimelineMetric m1 = newMetric(TimelineMetric.Type.SINGLE_VALUE,
+ "HDFS_BYTES_READ", ts - 80000, 57L);
+ TimelineMetric m2 = newMetric(TimelineMetric.Type.SINGLE_VALUE,
+ "MAP_SLOT_MILLIS", ts - 80000, 40L);
+ TimelineMetric m3 = newMetric(TimelineMetric.Type.SINGLE_VALUE,
+ "MAP1_SLOT_MILLIS", ts - 80000, 40L);
+ assertTrue(verifyMetrics(metric, m1, m2, m3));
+ } else if (entity.getId().equals("application_1111111111_2222")) {
+ TimelineMetric m1 = newMetric(TimelineMetric.Type.SINGLE_VALUE,
+ "MAP_SLOT_MILLIS", ts - 80000, 101L);
+ assertTrue(verifyMetrics(metric, m1));
+ } else if (entity.getId().equals("application_1111111111_2224")) {
+ TimelineMetric m1 = newMetric(TimelineMetric.Type.SINGLE_VALUE,
+ "MAP_SLOT_MILLIS", ts - 80000, 101L);
+ assertTrue(verifyMetrics(metric, m1));
+ }
+ }
+ }
+
+ uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
+ "timeline/clusters/cluster1/users/user1/flows/flow_name/apps?" +
+ "fields=ALL&metricslimit=6");
+ resp = getResponse(client, uri);
+ entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
+ assertNotNull(entities);
+ assertEquals(3, entities.size());
+ for (TimelineEntity entity : entities) {
+ assertTrue("Unexpected app in result",
+ (entity.getId().equals("application_1111111111_1111") &&
+ entity.getConfigs().size() == 1 &&
+ entity.getConfigs().equals(ImmutableMap.of("cfg2", "value1"))) ||
+ (entity.getId().equals("application_1111111111_2222") &&
+ entity.getConfigs().size() == 1 &&
+ entity.getConfigs().equals(ImmutableMap.of("cfg1", "value1"))) ||
+ (entity.getId().equals("application_1111111111_2224") &&
+ entity.getConfigs().size() == 0));
+ for (TimelineMetric metric : entity.getMetrics()) {
+ if (entity.getId().equals("application_1111111111_1111")) {
+ TimelineMetric m1 = newMetric(TimelineMetric.Type.TIME_SERIES,
+ "HDFS_BYTES_READ", ts - 80000, 57L);
+ m1.addValue(ts - 100000, 31L);
+ TimelineMetric m2 = newMetric(TimelineMetric.Type.TIME_SERIES,
+ "MAP_SLOT_MILLIS", ts - 80000, 40L);
+ m2.addValue(ts - 100000, 2L);
+ TimelineMetric m3 = newMetric(TimelineMetric.Type.TIME_SERIES,
+ "MAP1_SLOT_MILLIS", ts - 80000, 40L);
+ m3.addValue(ts - 100000, 2L);
+ assertTrue(verifyMetrics(metric, m1, m2, m3));
+ } else if (entity.getId().equals("application_1111111111_2222")) {
+ TimelineMetric m1 = newMetric(TimelineMetric.Type.TIME_SERIES,
+ "MAP_SLOT_MILLIS", ts - 80000, 101L);
+ m1.addValue(ts - 100000, 5L);
+ assertTrue(verifyMetrics(metric, m1));
+ } else if (entity.getId().equals("application_1111111111_2224")) {
+ TimelineMetric m1 = newMetric(TimelineMetric.Type.TIME_SERIES,
+ "MAP_SLOT_MILLIS", ts - 80000, 101L);
+ m1.addValue(ts - 100000, 5L);
+ assertTrue(verifyMetrics(metric, m1));
+ }
+ }
}
// Query without specifying cluster ID.
http://git-wip-us.apache.org/repos/asf/hadoop/blob/fc78a937/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java
index bcf2d2c..c002ca0 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java
@@ -612,7 +612,7 @@ public class TestHBaseTimelineStorage {
aggMetric.setId("MEM_USAGE");
Map<Long, Number> aggMetricValues = new HashMap<Long, Number>();
ts = System.currentTimeMillis();
- aggMetricValues.put(ts - 120000, 102400000);
+ aggMetricValues.put(ts - 120000, 102400000L);
aggMetric.setType(Type.SINGLE_VALUE);
aggMetric.setRealtimeAggregationOp(TimelineMetricOperation.SUM);
aggMetric.setValues(aggMetricValues);
@@ -721,12 +721,14 @@ public class TestHBaseTimelineStorage {
NavigableMap<Long, Number> metricMap = metricsResult.get(m1.getId());
matchMetrics(metricValues, metricMap);
- // read the timeline entity using the reader this time
+ // read the timeline entity using the reader this time. In metrics limit
+ // specify Integer MAX_VALUE. A TIME_SERIES will be returned(if more than
+ // one value exists for a metric).
TimelineEntity e1 = reader.getEntity(
new TimelineReaderContext(cluster, user, flow, runid, appId,
entity.getType(), entity.getId()),
- new TimelineDataToRetrieve(
- null, null, EnumSet.of(TimelineReader.Field.ALL)));
+ new TimelineDataToRetrieve(null, null,
+ EnumSet.of(TimelineReader.Field.ALL), Integer.MAX_VALUE));
assertNotNull(e1);
// verify attributes
@@ -753,12 +755,69 @@ public class TestHBaseTimelineStorage {
assertTrue(metric2.getId().equals("MAP_SLOT_MILLIS") ||
metric2.getId().equals("MEM_USAGE"));
if (metric2.getId().equals("MAP_SLOT_MILLIS")) {
+ assertEquals(6, metricValues2.size());
matchMetrics(metricValues, metricValues2);
}
if (metric2.getId().equals("MEM_USAGE")) {
+ assertEquals(1, metricValues2.size());
matchMetrics(aggMetricValues, metricValues2);
}
}
+
+ // In metrics limit specify a value of 3. No more than 3 values for a
+ // metric will be returned.
+ e1 = reader.getEntity(new TimelineReaderContext(cluster, user, flow,
+ runid, appId, entity.getType(), entity.getId()),
+ new TimelineDataToRetrieve(null, null,
+ EnumSet.of(TimelineReader.Field.ALL), 3));
+ assertNotNull(e1);
+ assertEquals(appId, e1.getId());
+ assertEquals(TimelineEntityType.YARN_APPLICATION.toString(),
+ e1.getType());
+ assertEquals(conf, e1.getConfigs());
+ metrics2 = e1.getMetrics();
+ assertEquals(2, metrics2.size());
+ for (TimelineMetric metric2 : metrics2) {
+ Map<Long, Number> metricValues2 = metric2.getValues();
+ assertTrue(metricValues2.size() <= 3);
+ assertTrue(metric2.getId().equals("MAP_SLOT_MILLIS") ||
+ metric2.getId().equals("MEM_USAGE"));
+ }
+
+ // Check if single value(latest value) instead of time series is returned
+ // if metricslimit is not set(null), irrespective of number of metric
+ // values.
+ e1 = reader.getEntity(
+ new TimelineReaderContext(cluster, user, flow, runid, appId,
+ entity.getType(), entity.getId()), new TimelineDataToRetrieve(
+ null, null, EnumSet.of(TimelineReader.Field.ALL), null));
+ assertNotNull(e1);
+ assertEquals(appId, e1.getId());
+ assertEquals(TimelineEntityType.YARN_APPLICATION.toString(),
+ e1.getType());
+ assertEquals(cTime, e1.getCreatedTime());
+ assertEquals(infoMap, e1.getInfo());
+ assertEquals(isRelatedTo, e1.getIsRelatedToEntities());
+ assertEquals(relatesTo, e1.getRelatesToEntities());
+ assertEquals(conf, e1.getConfigs());
+ assertEquals(2, e1.getMetrics().size());
+ for (TimelineMetric metric : e1.getMetrics()) {
+ assertEquals(1, metric.getValues().size());
+ assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric.getType());
+ assertTrue(metric.getId().equals("MAP_SLOT_MILLIS") ||
+ metric.getId().equals("MEM_USAGE"));
+ assertEquals(1, metric.getValues().size());
+ if (metric.getId().equals("MAP_SLOT_MILLIS")) {
+ assertTrue(metric.getValues().containsKey(ts - 20000));
+ assertEquals(metricValues.get(ts - 20000),
+ metric.getValues().get(ts - 20000));
+ }
+ if (metric.getId().equals("MEM_USAGE")) {
+ assertTrue(metric.getValues().containsKey(ts - 120000));
+ assertEquals(aggMetricValues.get(ts - 120000),
+ metric.getValues().get(ts - 120000));
+ }
+ }
} finally {
if (hbi != null) {
hbi.stop();
@@ -839,8 +898,8 @@ public class TestHBaseTimelineStorage {
String flow = "some_flow_name";
String flowVersion = "AB7822C10F1111";
long runid = 1002345678919L;
- String appName =
- ApplicationId.newInstance(System.currentTimeMillis(), 1).toString();
+ String appName = ApplicationId.newInstance(System.currentTimeMillis() +
+ 9000000L, 1).toString();
hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
hbi.stop();
@@ -931,12 +990,14 @@ public class TestHBaseTimelineStorage {
TimelineEntity e1 = reader.getEntity(
new TimelineReaderContext(cluster, user, flow, runid, appName,
entity.getType(), entity.getId()),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL),
+ Integer.MAX_VALUE));
Set<TimelineEntity> es1 = reader.getEntities(
new TimelineReaderContext(cluster, user, flow, runid, appName,
entity.getType(), null),
new TimelineEntityFilters(),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL),
+ Integer.MAX_VALUE));
assertNotNull(e1);
assertEquals(1, es1.size());
@@ -962,6 +1023,25 @@ public class TestHBaseTimelineStorage {
Map<Long, Number> metricValues2 = metric2.getValues();
matchMetrics(metricValues, metricValues2);
}
+
+ e1 = reader.getEntity(new TimelineReaderContext(cluster, user, flow,
+ runid, appName, entity.getType(), entity.getId()),
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
+ assertNotNull(e1);
+ assertEquals(id, e1.getId());
+ assertEquals(type, e1.getType());
+ assertEquals(cTime, e1.getCreatedTime());
+ assertEquals(infoMap, e1.getInfo());
+ assertEquals(isRelatedTo, e1.getIsRelatedToEntities());
+ assertEquals(relatesTo, e1.getRelatesToEntities());
+ assertEquals(conf, e1.getConfigs());
+ for (TimelineMetric metric : e1.getMetrics()) {
+ assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric.getType());
+ assertEquals(1, metric.getValues().size());
+ assertTrue(metric.getValues().containsKey(ts - 20000));
+ assertEquals(metricValues.get(ts - 20000),
+ metric.getValues().get(ts - 20000));
+ }
} finally {
if (hbi != null) {
hbi.stop();
@@ -1067,11 +1147,11 @@ public class TestHBaseTimelineStorage {
TimelineEntity e1 = reader.getEntity(
new TimelineReaderContext(cluster, user, flow, runid, appName,
entity.getType(), entity.getId()),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
TimelineEntity e2 = reader.getEntity(
new TimelineReaderContext(cluster, user, null, null, appName,
entity.getType(), entity.getId()),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertNotNull(e1);
assertNotNull(e2);
assertEquals(e1, e2);
@@ -1125,8 +1205,8 @@ public class TestHBaseTimelineStorage {
String flow = "other_flow_name";
String flowVersion = "1111F01C2287BA";
long runid = 1009876543218L;
- String appName =
- ApplicationId.newInstance(System.currentTimeMillis(), 1).toString();
+ String appName = ApplicationId.newInstance(System.currentTimeMillis() +
+ 9000000L, 1).toString();
byte[] startRow =
EntityRowKey.getRowKeyPrefix(cluster, user, flow, runid, appName);
hbi.write(cluster, user, flow, flowVersion, runid, appName, entities);
@@ -1173,12 +1253,12 @@ public class TestHBaseTimelineStorage {
TimelineEntity e1 = reader.getEntity(
new TimelineReaderContext(cluster, user, flow, runid, appName,
entity.getType(), entity.getId()),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
Set<TimelineEntity> es1 = reader.getEntities(
new TimelineReaderContext(cluster, user, flow, runid, appName,
entity.getType(), null),
new TimelineEntityFilters(),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertNotNull(e1);
assertEquals(1, es1.size());
@@ -1235,7 +1315,7 @@ public class TestHBaseTimelineStorage {
TimelineEntity e1 = reader.getEntity(
new TimelineReaderContext(cluster, user, flow, runid, appName,
entity.getType(), entity.getId()),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertNotNull(e1);
// check the events
NavigableSet<TimelineEvent> events = e1.getEvents();
@@ -1325,7 +1405,7 @@ public class TestHBaseTimelineStorage {
TimelineEntity entity = reader.getEntity(
new TimelineReaderContext("cluster1", "user1", "some_flow_name",
1002345678919L, "application_1231111111_1111","world", "hello"),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertNotNull(entity);
assertEquals(3, entity.getConfigs().size());
assertEquals(1, entity.getIsRelatedToEntities().size());
@@ -1333,7 +1413,7 @@ public class TestHBaseTimelineStorage {
new TimelineReaderContext("cluster1", "user1", "some_flow_name",
1002345678919L, "application_1231111111_1111","world",
null), new TimelineEntityFilters(),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertEquals(3, entities.size());
int cfgCnt = 0;
int metricCnt = 0;
@@ -1457,7 +1537,7 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
null, ef),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertEquals(1, entities.size());
int eventCnt = 0;
for (TimelineEntity timelineEntity : entities) {
@@ -1583,7 +1663,7 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, irt, null, null, null,
null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertEquals(2, entities.size());
int isRelatedToCnt = 0;
for (TimelineEntity timelineEntity : entities) {
@@ -1732,7 +1812,7 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, rt, null, null, null, null,
null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertEquals(2, entities.size());
int relatesToCnt = 0;
for (TimelineEntity timelineEntity : entities) {
@@ -1932,7 +2012,7 @@ public class TestHBaseTimelineStorage {
new TimelineReaderContext("cluster1", "user1", "some_flow_name",
1002345678919L, "application_1231111111_1111","world", "hello"),
new TimelineDataToRetrieve(
- null, null, EnumSet.of(Field.INFO, Field.CONFIGS)));
+ null, null, EnumSet.of(Field.INFO, Field.CONFIGS), null));
assertNotNull(e1);
assertEquals(3, e1.getConfigs().size());
assertEquals(0, e1.getIsRelatedToEntities().size());
@@ -1941,7 +2021,7 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(),
new TimelineDataToRetrieve(
- null, null, EnumSet.of(Field.IS_RELATED_TO, Field.METRICS)));
+ null, null, EnumSet.of(Field.IS_RELATED_TO, Field.METRICS), null));
assertEquals(3, es1.size());
int metricsCnt = 0;
int isRelatedToCnt = 0;
@@ -1964,14 +2044,14 @@ public class TestHBaseTimelineStorage {
TimelineEntity e1 = reader.getEntity(
new TimelineReaderContext("cluster1", "user1", "some_flow_name",
1002345678919L, "application_1231111111_1111","world", "hello"),
- new TimelineDataToRetrieve(list, null, null));
+ new TimelineDataToRetrieve(list, null, null, null));
assertNotNull(e1);
assertEquals(1, e1.getConfigs().size());
Set<TimelineEntity> es1 = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "some_flow_name",
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(),
- new TimelineDataToRetrieve(list, null, null));
+ new TimelineDataToRetrieve(list, null, null, null));
int cfgCnt = 0;
for (TimelineEntity entity : es1) {
cfgCnt += entity.getConfigs().size();
@@ -2002,7 +2082,8 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS),
+ null));
assertEquals(2, entities.size());
int cfgCnt = 0;
for (TimelineEntity entity : entities) {
@@ -2015,7 +2096,7 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertEquals(2, entities.size());
cfgCnt = 0;
for (TimelineEntity entity : entities) {
@@ -2031,7 +2112,8 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList1, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS),
+ null));
assertEquals(1, entities.size());
cfgCnt = 0;
for (TimelineEntity entity : entities) {
@@ -2049,7 +2131,8 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList2, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS),
+ null));
assertEquals(0, entities.size());
TimelineFilterList confFilterList3 = new TimelineFilterList(
@@ -2060,7 +2143,8 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList3, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS),
+ null));
assertEquals(0, entities.size());
TimelineFilterList confFilterList4 = new TimelineFilterList(
@@ -2071,7 +2155,8 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList4, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS),
+ null));
assertEquals(0, entities.size());
TimelineFilterList confFilterList5 = new TimelineFilterList(
@@ -2082,7 +2167,8 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList5, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS),
+ null));
assertEquals(3, entities.size());
}
@@ -2099,7 +2185,7 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList, null, null),
- new TimelineDataToRetrieve(list, null, null));
+ new TimelineDataToRetrieve(list, null, null, null));
assertEquals(1, entities.size());
int cfgCnt = 0;
for (TimelineEntity entity : entities) {
@@ -2130,7 +2216,7 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList1, null, null),
- new TimelineDataToRetrieve(confsToRetrieve, null, null));
+ new TimelineDataToRetrieve(confsToRetrieve, null, null, null));
assertEquals(2, entities.size());
cfgCnt = 0;
for (TimelineEntity entity : entities) {
@@ -2151,14 +2237,14 @@ public class TestHBaseTimelineStorage {
TimelineEntity e1 = reader.getEntity(
new TimelineReaderContext("cluster1", "user1", "some_flow_name",
1002345678919L, "application_1231111111_1111","world", "hello"),
- new TimelineDataToRetrieve(null, list, null));
+ new TimelineDataToRetrieve(null, list, null, null));
assertNotNull(e1);
assertEquals(1, e1.getMetrics().size());
Set<TimelineEntity> es1 = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "some_flow_name",
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(),
- new TimelineDataToRetrieve(null, list, null));
+ new TimelineDataToRetrieve(null, list, null, null));
int metricCnt = 0;
for (TimelineEntity entity : es1) {
metricCnt += entity.getMetrics().size();
@@ -2187,7 +2273,8 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS),
+ null));
assertEquals(2, entities.size());
int metricCnt = 0;
for (TimelineEntity entity : entities) {
@@ -2200,7 +2287,7 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertEquals(2, entities.size());
metricCnt = 0;
for (TimelineEntity entity : entities) {
@@ -2218,7 +2305,8 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList1, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS),
+ null));
assertEquals(1, entities.size());
metricCnt = 0;
for (TimelineEntity entity : entities) {
@@ -2236,7 +2324,8 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList2, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS),
+ null));
assertEquals(0, entities.size());
TimelineFilterList metricFilterList3 = new TimelineFilterList(
@@ -2247,7 +2336,8 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList3, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS),
+ null));
assertEquals(0, entities.size());
TimelineFilterList metricFilterList4 = new TimelineFilterList(
@@ -2258,7 +2348,8 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList4, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS),
+ null));
assertEquals(0, entities.size());
TimelineFilterList metricFilterList5 = new TimelineFilterList(
@@ -2269,7 +2360,8 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList5, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS),
+ null));
assertEquals(3, entities.size());
}
@@ -2286,7 +2378,7 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList, null),
- new TimelineDataToRetrieve(null, list, null));
+ new TimelineDataToRetrieve(null, list, null, null));
assertEquals(1, entities.size());
int metricCnt = 0;
for (TimelineEntity entity : entities) {
@@ -2316,17 +2408,38 @@ public class TestHBaseTimelineStorage {
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList1, null),
new TimelineDataToRetrieve(
- null, metricsToRetrieve, EnumSet.of(Field.METRICS)));
+ null, metricsToRetrieve, EnumSet.of(Field.METRICS), null));
+ assertEquals(2, entities.size());
+ metricCnt = 0;
+ for (TimelineEntity entity : entities) {
+ metricCnt += entity.getMetrics().size();
+ for (TimelineMetric metric : entity.getMetrics()) {
+ assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric.getType());
+ assertEquals(1, metric.getValues().size());
+ assertTrue("Metric Id returned should start with MAP1_",
+ metric.getId().startsWith("MAP1_"));
+ }
+ }
+ assertEquals(2, metricCnt);
+
+ entities = reader.getEntities(new TimelineReaderContext("cluster1", "user1",
+ "some_flow_name", 1002345678919L, "application_1231111111_1111","world",
+ null), new TimelineEntityFilters(null, null, null, null, null, null,
+ null, metricFilterList1, null), new TimelineDataToRetrieve(null,
+ metricsToRetrieve, EnumSet.of(Field.METRICS), Integer.MAX_VALUE));
assertEquals(2, entities.size());
metricCnt = 0;
+ int metricValCnt = 0;
for (TimelineEntity entity : entities) {
metricCnt += entity.getMetrics().size();
for (TimelineMetric metric : entity.getMetrics()) {
+ metricValCnt += metric.getValues().size();
assertTrue("Metric Id returned should start with MAP1_",
metric.getId().startsWith("MAP1_"));
}
}
assertEquals(2, metricCnt);
+ assertEquals(7, metricValCnt);
}
@Test
@@ -2348,7 +2461,7 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, infoFilterList,
null, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
assertEquals(2, entities.size());
int infoCnt = 0;
for (TimelineEntity entity : entities) {
@@ -2364,7 +2477,7 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, infoFilterList1,
null, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
assertEquals(1, entities.size());
infoCnt = 0;
for (TimelineEntity entity : entities) {
@@ -2382,7 +2495,7 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, infoFilterList2,
null, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
assertEquals(0, entities.size());
TimelineFilterList infoFilterList3 = new TimelineFilterList(
@@ -2393,7 +2506,7 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, infoFilterList3,
null, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
assertEquals(0, entities.size());
TimelineFilterList infoFilterList4 = new TimelineFilterList(
@@ -2404,7 +2517,7 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, infoFilterList4,
null, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
assertEquals(0, entities.size());
TimelineFilterList infoFilterList5 = new TimelineFilterList(
@@ -2415,7 +2528,7 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1231111111_1111","world", null),
new TimelineEntityFilters(null, null, null, null, null, infoFilterList5,
null, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
assertEquals(3, entities.size());
}
@@ -2425,7 +2538,7 @@ public class TestHBaseTimelineStorage {
new TimelineReaderContext("cluster1", "user1", "some_flow_name",
1002345678919L, "application_1111111111_2222",
TimelineEntityType.YARN_APPLICATION.toString(), null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertNotNull(entity);
assertEquals(3, entity.getConfigs().size());
assertEquals(1, entity.getIsRelatedToEntities().size());
@@ -2434,7 +2547,7 @@ public class TestHBaseTimelineStorage {
1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
null),
new TimelineEntityFilters(),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertEquals(3, entities.size());
int cfgCnt = 0;
int metricCnt = 0;
@@ -2546,7 +2659,7 @@ public class TestHBaseTimelineStorage {
1002345678919L, "application_1111111111_2222",
TimelineEntityType.YARN_APPLICATION.toString(), null),
new TimelineDataToRetrieve(
- null, null, EnumSet.of(Field.INFO, Field.CONFIGS)));
+ null, null, EnumSet.of(Field.INFO, Field.CONFIGS), null));
assertNotNull(e1);
assertEquals(3, e1.getConfigs().size());
assertEquals(0, e1.getIsRelatedToEntities().size());
@@ -2556,7 +2669,7 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(),
new TimelineDataToRetrieve(
- null, null, EnumSet.of(Field.IS_RELATED_TO, Field.METRICS)));
+ null, null, EnumSet.of(Field.IS_RELATED_TO, Field.METRICS), null));
assertEquals(3, es1.size());
int metricsCnt = 0;
int isRelatedToCnt = 0;
@@ -2586,7 +2699,7 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, irt, null, null, null,
null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertEquals(2, entities.size());
int isRelatedToCnt = 0;
for (TimelineEntity timelineEntity : entities) {
@@ -2745,7 +2858,7 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, rt, null, null, null, null,
null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertEquals(2, entities.size());
int relatesToCnt = 0;
for (TimelineEntity timelineEntity : entities) {
@@ -2989,7 +3102,8 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS),
+ null));
assertEquals(2, entities.size());
int cfgCnt = 0;
for (TimelineEntity entity : entities) {
@@ -3003,7 +3117,7 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertEquals(2, entities.size());
cfgCnt = 0;
for (TimelineEntity entity : entities) {
@@ -3020,7 +3134,8 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList1, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS),
+ null));
assertEquals(1, entities.size());
cfgCnt = 0;
for (TimelineEntity entity : entities) {
@@ -3039,7 +3154,8 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList2, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS),
+ null));
assertEquals(0, entities.size());
TimelineFilterList confFilterList3 = new TimelineFilterList(
@@ -3051,7 +3167,8 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList3, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS),
+ null));
assertEquals(0, entities.size());
TimelineFilterList confFilterList4 = new TimelineFilterList(
@@ -3063,7 +3180,8 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList4, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS),
+ null));
assertEquals(0, entities.size());
TimelineFilterList confFilterList5 = new TimelineFilterList(
@@ -3075,7 +3193,8 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList5, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS),
+ null));
assertEquals(3, entities.size());
}
@@ -3092,7 +3211,7 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
null, ef),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertEquals(1, entities.size());
int eventCnt = 0;
for (TimelineEntity timelineEntity : entities) {
@@ -3218,7 +3337,7 @@ public class TestHBaseTimelineStorage {
new TimelineReaderContext("cluster1", "user1", "some_flow_name",
1002345678919L, "application_1111111111_2222",
TimelineEntityType.YARN_APPLICATION.toString(), null),
- new TimelineDataToRetrieve(list, null, null));
+ new TimelineDataToRetrieve(list, null, null, null));
assertNotNull(e1);
assertEquals(1, e1.getConfigs().size());
Set<TimelineEntity> es1 = reader.getEntities(
@@ -3226,7 +3345,7 @@ public class TestHBaseTimelineStorage {
1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
null) ,
new TimelineEntityFilters(),
- new TimelineDataToRetrieve(list, null, null));
+ new TimelineDataToRetrieve(list, null, null, null));
int cfgCnt = 0;
for (TimelineEntity entity : es1) {
cfgCnt += entity.getConfigs().size();
@@ -3252,7 +3371,7 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList, null, null),
- new TimelineDataToRetrieve(list, null, null));
+ new TimelineDataToRetrieve(list, null, null, null));
assertEquals(1, entities.size());
int cfgCnt = 0;
for (TimelineEntity entity : entities) {
@@ -3285,7 +3404,7 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, null,
confFilterList1, null, null),
- new TimelineDataToRetrieve(confsToRetrieve, null, null));
+ new TimelineDataToRetrieve(confsToRetrieve, null, null, null));
assertEquals(2, entities.size());
cfgCnt = 0;
for (TimelineEntity entity : entities) {
@@ -3316,7 +3435,8 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS),
+ null));
assertEquals(2, entities.size());
int metricCnt = 0;
for (TimelineEntity entity : entities) {
@@ -3330,7 +3450,7 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertEquals(2, entities.size());
metricCnt = 0;
for (TimelineEntity entity : entities) {
@@ -3349,7 +3469,8 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList1, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS),
+ null));
assertEquals(1, entities.size());
metricCnt = 0;
for (TimelineEntity entity : entities) {
@@ -3368,7 +3489,8 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList2, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS),
+ null));
assertEquals(0, entities.size());
TimelineFilterList metricFilterList3 = new TimelineFilterList(
@@ -3380,7 +3502,8 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList3, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS),
+ null));
assertEquals(0, entities.size());
TimelineFilterList metricFilterList4 = new TimelineFilterList(
@@ -3392,7 +3515,8 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList4, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS),
+ null));
assertEquals(0, entities.size());
TimelineFilterList metricFilterList5 = new TimelineFilterList(
@@ -3404,7 +3528,8 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList5, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS),
+ null));
assertEquals(3, entities.size());
}
@@ -3417,7 +3542,7 @@ public class TestHBaseTimelineStorage {
new TimelineReaderContext("cluster1", "user1", "some_flow_name",
1002345678919L, "application_1111111111_2222",
TimelineEntityType.YARN_APPLICATION.toString(), null),
- new TimelineDataToRetrieve(null, list, null));
+ new TimelineDataToRetrieve(null, list, null, null));
assertNotNull(e1);
assertEquals(1, e1.getMetrics().size());
Set<TimelineEntity> es1 = reader.getEntities(
@@ -3425,7 +3550,7 @@ public class TestHBaseTimelineStorage {
1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
null),
new TimelineEntityFilters(),
- new TimelineDataToRetrieve(null, list, null));
+ new TimelineDataToRetrieve(null, list, null, null));
int metricCnt = 0;
for (TimelineEntity entity : es1) {
metricCnt += entity.getMetrics().size();
@@ -3451,7 +3576,7 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList, null),
- new TimelineDataToRetrieve(null, list, null));
+ new TimelineDataToRetrieve(null, list, null, null));
int metricCnt = 0;
assertEquals(1, entities.size());
for (TimelineEntity entity : entities) {
@@ -3477,17 +3602,37 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList1, null),
- new TimelineDataToRetrieve(null, metricsToRetrieve, null));
+ new TimelineDataToRetrieve(null, metricsToRetrieve, null, null));
+ metricCnt = 0;
+ assertEquals(2, entities.size());
+ for (TimelineEntity entity : entities) {
+ metricCnt += entity.getMetrics().size();
+ for (TimelineMetric metric : entity.getMetrics()) {
+ assertTrue("Metric Id returned should start with MAP1_",
+ metric.getId().startsWith("MAP1_"));
+ }
+ }
+ assertEquals(2, metricCnt);
+
+ entities = reader.getEntities(new TimelineReaderContext("cluster1", "user1",
+ "some_flow_name", 1002345678919L, null,
+ TimelineEntityType.YARN_APPLICATION.toString(), null),
+ new TimelineEntityFilters(null, null, null, null, null, null, null,
+ metricFilterList1, null), new TimelineDataToRetrieve(null,
+ metricsToRetrieve, EnumSet.of(Field.METRICS), Integer.MAX_VALUE));
metricCnt = 0;
+ int metricValCnt = 0;
assertEquals(2, entities.size());
for (TimelineEntity entity : entities) {
metricCnt += entity.getMetrics().size();
for (TimelineMetric metric : entity.getMetrics()) {
+ metricValCnt += metric.getValues().size();
assertTrue("Metric Id returned should start with MAP1_",
metric.getId().startsWith("MAP1_"));
}
}
assertEquals(2, metricCnt);
+ assertEquals(7, metricValCnt);
}
@Test
@@ -3510,7 +3655,7 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, infoFilterList,
null, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
assertEquals(2, entities.size());
int infoCnt = 0;
for (TimelineEntity entity : entities) {
@@ -3527,7 +3672,7 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, infoFilterList1,
null, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
assertEquals(1, entities.size());
infoCnt = 0;
for (TimelineEntity entity : entities) {
@@ -3546,7 +3691,7 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, infoFilterList2,
null, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
assertEquals(0, entities.size());
TimelineFilterList infoFilterList3 = new TimelineFilterList(
@@ -3558,7 +3703,7 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, infoFilterList3,
null, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
assertEquals(0, entities.size());
TimelineFilterList infoFilterList4 = new TimelineFilterList(
@@ -3570,7 +3715,7 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, infoFilterList4,
null, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
assertEquals(0, entities.size());
TimelineFilterList infoFilterList5 = new TimelineFilterList(
@@ -3582,7 +3727,7 @@ public class TestHBaseTimelineStorage {
null),
new TimelineEntityFilters(null, null, null, null, null, infoFilterList5,
null, null, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO)));
+ new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null));
assertEquals(3, entities.size());
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/fc78a937/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java
index 801d43c..d0f98a5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java
@@ -422,7 +422,7 @@ public class TestHBaseStorageFlowRun {
TimelineEntity entity = hbr.getEntity(
new TimelineReaderContext(cluster, user, flow, 1002345678919L, null,
TimelineEntityType.YARN_FLOW_RUN.toString(), null),
- new TimelineDataToRetrieve(null, metricsToRetrieve, null));
+ new TimelineDataToRetrieve(null, metricsToRetrieve, null, null));
assertTrue(TimelineEntityType.YARN_FLOW_RUN.matches(entity.getType()));
Set<TimelineMetric> metrics = entity.getMetrics();
assertEquals(1, metrics.size());
@@ -447,7 +447,7 @@ public class TestHBaseStorageFlowRun {
new TimelineReaderContext(cluster, user, flow, null, null,
TimelineEntityType.YARN_FLOW_RUN.toString(), null),
new TimelineEntityFilters(),
- new TimelineDataToRetrieve(null, metricsToRetrieve, null));
+ new TimelineDataToRetrieve(null, metricsToRetrieve, null, null));
assertEquals(2, entities.size());
int metricCnt = 0;
for (TimelineEntity timelineEntity : entities) {
@@ -513,8 +513,8 @@ public class TestHBaseStorageFlowRun {
entities = hbr.getEntities(
new TimelineReaderContext(cluster, user, flow, runid, null,
TimelineEntityType.YARN_FLOW_RUN.toString(), null),
- new TimelineEntityFilters(),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
+ new TimelineEntityFilters(), new TimelineDataToRetrieve(null, null,
+ EnumSet.of(Field.METRICS), null));
assertEquals(1, entities.size());
for (TimelineEntity timelineEntity : entities) {
Set<TimelineMetric> timelineMetrics = timelineEntity.getMetrics();
@@ -766,8 +766,8 @@ public class TestHBaseStorageFlowRun {
new TimelineReaderContext(cluster, user, flow, null,
null, TimelineEntityType.YARN_FLOW_RUN.toString(), null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
- metricFilterList, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
+ metricFilterList, null), new TimelineDataToRetrieve(null, null,
+ EnumSet.of(Field.METRICS), null));
assertEquals(2, entities.size());
int metricCnt = 0;
for (TimelineEntity entity : entities) {
@@ -783,8 +783,8 @@ public class TestHBaseStorageFlowRun {
new TimelineReaderContext(cluster, user, flow, null, null,
TimelineEntityType.YARN_FLOW_RUN.toString(), null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
- metricFilterList1, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
+ metricFilterList1, null), new TimelineDataToRetrieve(null, null,
+ EnumSet.of(Field.METRICS), null));
assertEquals(1, entities.size());
metricCnt = 0;
for (TimelineEntity entity : entities) {
@@ -799,8 +799,8 @@ public class TestHBaseStorageFlowRun {
new TimelineReaderContext(cluster, user, flow, null, null,
TimelineEntityType.YARN_FLOW_RUN.toString(), null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
- metricFilterList2, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
+ metricFilterList2, null), new TimelineDataToRetrieve(null, null,
+ EnumSet.of(Field.METRICS), null));
assertEquals(0, entities.size());
TimelineFilterList metricFilterList3 = new TimelineFilterList(
@@ -809,8 +809,8 @@ public class TestHBaseStorageFlowRun {
new TimelineReaderContext(cluster, user, flow, null, null,
TimelineEntityType.YARN_FLOW_RUN.toString(), null),
new TimelineEntityFilters(null, null, null, null, null, null, null,
- metricFilterList3, null),
- new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS)));
+ metricFilterList3, null), new TimelineDataToRetrieve(null, null,
+ EnumSet.of(Field.METRICS), null));
assertEquals(0, entities.size());
TimelineFilterList list3 = new TimelineFilterList();
@@ -832,7 +832,7 @@ public class TestHBaseStorageFlowRun {
new TimelineEntityFilters(null, null, null, null, null, null, null,
metricFilterList4, null),
new TimelineDataToRetrieve(null, metricsToRetrieve,
- EnumSet.of(Field.ALL)));
+ EnumSet.of(Field.ALL), null));
assertEquals(2, entities.size());
metricCnt = 0;
for (TimelineEntity entity : entities) {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/fc78a937/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineDataToRetrieve.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineDataToRetrieve.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineDataToRetrieve.java
index 0cc83d7..325050a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineDataToRetrieve.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineDataToRetrieve.java
@@ -53,6 +53,10 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Fiel
* object to retrieve, see {@link Field}. If null, retrieves 3 fields,
* namely entity id, entity type and entity created time. All fields will
* be returned if {@link Field#ALL} is specified.</li>
+ * <li><b>metricsLimit</b> - If fieldsToRetrieve contains METRICS/ALL or
+ * metricsToRetrieve is specified, this limit defines an upper limit to the
+ * number of metrics to return. This parameter is ignored if METRICS are not to
+ * be fetched.</li>
* </ul>
*/
@Private
@@ -61,16 +65,28 @@ public class TimelineDataToRetrieve {
private TimelineFilterList confsToRetrieve;
private TimelineFilterList metricsToRetrieve;
private EnumSet<Field> fieldsToRetrieve;
+ private Integer metricsLimit;
+
+ /**
+ * Default limit of number of metrics to return.
+ */
+ public static final Integer DEFAULT_METRICS_LIMIT = 1;
public TimelineDataToRetrieve() {
- this(null, null, null);
+ this(null, null, null, null);
}
public TimelineDataToRetrieve(TimelineFilterList confs,
- TimelineFilterList metrics, EnumSet<Field> fields) {
+ TimelineFilterList metrics, EnumSet<Field> fields,
+ Integer limitForMetrics) {
this.confsToRetrieve = confs;
this.metricsToRetrieve = metrics;
this.fieldsToRetrieve = fields;
+ if (limitForMetrics == null || limitForMetrics < 1) {
+ this.metricsLimit = DEFAULT_METRICS_LIMIT;
+ } else {
+ this.metricsLimit = limitForMetrics;
+ }
if (this.fieldsToRetrieve == null) {
this.fieldsToRetrieve = EnumSet.noneOf(Field.class);
@@ -116,4 +132,16 @@ public class TimelineDataToRetrieve {
fieldsToRetrieve.add(Field.METRICS);
}
}
+
+ public Integer getMetricsLimit() {
+ return metricsLimit;
+ }
+
+ public void setMetricsLimit(Integer limit) {
+ if (limit == null || limit < 1) {
+ this.metricsLimit = DEFAULT_METRICS_LIMIT;
+ } else {
+ this.metricsLimit = limit;
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/fc78a937/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineEntityFilters.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineEntityFilters.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineEntityFilters.java
index 4821d31..8f2b725 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineEntityFilters.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineEntityFilters.java
@@ -32,8 +32,9 @@ import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyVa
* filters restrict the number of entities to return.<br>
* Filters contain the following :<br>
* <ul>
- * <li><b>limit</b> - A limit on the number of entities to return. If null
- * or {@literal <=0}, defaults to {@link #DEFAULT_LIMIT}.</li>
+ * <li><b>limit</b> - A limit on the number of entities to return. If null or
+ * {@literal < 0}, defaults to {@link #DEFAULT_LIMIT}. The maximum possible
+ * value for limit can be {@link Long#MAX_VALUE}.</li>
* <li><b>createdTimeBegin</b> - Matched entities should not be created
* before this timestamp. If null or {@literal <=0}, defaults to 0.</li>
* <li><b>createdTimeEnd</b> - Matched entities should not be created after
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org