You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by ji...@apache.org on 2015/05/01 09:01:35 UTC
[18/50] [abbrv] hadoop git commit: YARN-3530. ATS throws exception on
trying to filter results without otherinfo. Contributed by zhijie shen
YARN-3530. ATS throws exception on trying to filter results without
otherinfo. Contributed by zhijie shen
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7f07c4d8
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7f07c4d8
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7f07c4d8
Branch: refs/heads/HDFS-7240
Commit: 7f07c4d81023e3bf4bf8980e64cc9420ec31cf55
Parents: 9a3dda3
Author: Xuan <xg...@apache.org>
Authored: Mon Apr 27 10:36:42 2015 -0700
Committer: Xuan <xg...@apache.org>
Committed: Mon Apr 27 10:36:42 2015 -0700
----------------------------------------------------------------------
hadoop-yarn-project/CHANGES.txt | 3 +
.../server/timeline/LeveldbTimelineStore.java | 34 ++++++-
.../server/timeline/TimelineStoreTestUtils.java | 99 ++++++++++++++------
3 files changed, 104 insertions(+), 32 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7f07c4d8/hadoop-yarn-project/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 87db291..fdc3f4a 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -271,6 +271,9 @@ Release 2.8.0 - UNRELEASED
YARN-3464. Race condition in LocalizerRunner kills localizer before
localizing all resources. (Zhihai Xu via kasha)
+ YARN-3530. ATS throws exception on trying to filter results without otherinfo.
+ (zhijie shen via xgong)
+
Release 2.7.1 - UNRELEASED
INCOMPATIBLE CHANGES
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7f07c4d8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java
index d521f70..8cfa0c7 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java
@@ -357,6 +357,9 @@ public class LeveldbTimelineStore extends AbstractService
iterator = new LeveldbIterator(db);
iterator.seek(prefix);
+ if (fields == null) {
+ fields = EnumSet.allOf(Field.class);
+ }
return getEntity(entityId, entityType, revStartTime, fields, iterator,
prefix, prefix.length);
} catch(DBException e) {
@@ -373,10 +376,6 @@ public class LeveldbTimelineStore extends AbstractService
private static TimelineEntity getEntity(String entityId, String entityType,
Long startTime, EnumSet<Field> fields, LeveldbIterator iterator,
byte[] prefix, int prefixlen) throws IOException {
- if (fields == null) {
- fields = EnumSet.allOf(Field.class);
- }
-
TimelineEntity entity = new TimelineEntity();
boolean events = false;
boolean lastEvent = false;
@@ -590,6 +589,25 @@ public class LeveldbTimelineStore extends AbstractService
String entityType, Long limit, Long starttime, Long endtime,
String fromId, Long fromTs, Collection<NameValuePair> secondaryFilters,
EnumSet<Field> fields, CheckAcl checkAcl) throws IOException {
+ // Even if other info and primary filter fields are not included, we
+ // still need to load them to match secondary filters when they are
+ // non-empty
+ if (fields == null) {
+ fields = EnumSet.allOf(Field.class);
+ }
+ boolean addPrimaryFilters = false;
+ boolean addOtherInfo = false;
+ if (secondaryFilters != null && secondaryFilters.size() > 0) {
+ if (!fields.contains(Field.PRIMARY_FILTERS)) {
+ fields.add(Field.PRIMARY_FILTERS);
+ addPrimaryFilters = true;
+ }
+ if (!fields.contains(Field.OTHER_INFO)) {
+ fields.add(Field.OTHER_INFO);
+ addOtherInfo = true;
+ }
+ }
+
LeveldbIterator iterator = null;
try {
KeyBuilder kb = KeyBuilder.newInstance().add(base).add(entityType);
@@ -690,6 +708,14 @@ public class LeveldbTimelineStore extends AbstractService
entity.setDomainId(DEFAULT_DOMAIN_ID);
}
if (checkAcl == null || checkAcl.check(entity)) {
+ // Remove primary filters and other info if they were added only for
+ // matching secondary filters
+ if (addPrimaryFilters) {
+ entity.setPrimaryFilters(null);
+ }
+ if (addOtherInfo) {
+ entity.setOtherInfo(null);
+ }
entities.addEntity(entity);
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7f07c4d8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TimelineStoreTestUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TimelineStoreTestUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TimelineStoreTestUtils.java
index da71f46..6ac5a35 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TimelineStoreTestUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TimelineStoreTestUtils.java
@@ -28,6 +28,7 @@ import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
+import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
@@ -487,6 +488,13 @@ public class TimelineStoreTestUtils {
primaryFilter, secondaryFilters, null, null).getEntities();
}
+ protected List<TimelineEntity> getEntitiesWithFilters(String entityType,
+ NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters,
+ EnumSet<Field> fields) throws IOException {
+ return store.getEntities(entityType, null, null, null, null, null,
+ primaryFilter, secondaryFilters, fields, null).getEntities();
+ }
+
protected List<TimelineEntity> getEntities(String entityType, Long limit,
Long windowStart, Long windowEnd, NameValuePair primaryFilter,
EnumSet<Field> fields) throws IOException {
@@ -751,38 +759,73 @@ public class TimelineStoreTestUtils {
}
public void testGetEntitiesWithSecondaryFilters() throws IOException {
- // test using secondary filter
- List<TimelineEntity> entities = getEntitiesWithFilters("type_1", null,
- goodTestingFilters);
- assertEquals(3, entities.size());
- verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0), domainId1);
- verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1), domainId1);
- verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(2), domainId2);
-
- entities = getEntitiesWithFilters("type_1", userFilter, goodTestingFilters);
- assertEquals(3, entities.size());
- verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0), domainId1);
- verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1), domainId1);
- verifyEntityInfo(entityId6, entityType1, EMPTY_EVENTS, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(2), domainId2);
+ for (int i = 0; i < 4; ++i) {
+ // Verify the secondary filter works both when other info is included and when it is not.
+ EnumSet<Field> fields = null;
+ if (i == 1) {
+ fields = EnumSet.noneOf(Field.class);
+ } else if (i == 2) {
+ fields = EnumSet.of(Field.PRIMARY_FILTERS);
+ } else if (i == 3) {
+ fields = EnumSet.of(Field.OTHER_INFO);
+ }
+ // test using secondary filter
+ List<TimelineEntity> entities = getEntitiesWithFilters("type_1", null,
+ goodTestingFilters, fields);
+ assertEquals(3, entities.size());
+ verifyEntityInfo(entityId1, entityType1,
+ (i == 0 ? events1 : null),
+ (i == 0 ? EMPTY_REL_ENTITIES : null),
+ (i == 0 || i == 2 ? primaryFilters : null),
+ (i == 0 || i == 3 ? otherInfo : null), entities.get(0), domainId1);
+ verifyEntityInfo(entityId1b, entityType1,
+ (i == 0 ? events1 : null),
+ (i == 0 ? EMPTY_REL_ENTITIES : null),
+ (i == 0 || i == 2 ? primaryFilters : null),
+ (i == 0 || i == 3 ? otherInfo : null), entities.get(1), domainId1);
+ verifyEntityInfo(entityId6, entityType1,
+ (i == 0 ? EMPTY_EVENTS : null),
+ (i == 0 ? EMPTY_REL_ENTITIES : null),
+ (i == 0 || i == 2 ? primaryFilters : null),
+ (i == 0 || i == 3 ? otherInfo : null), entities.get(2), domainId2);
+
+ entities =
+ getEntitiesWithFilters("type_1", userFilter, goodTestingFilters, fields);
+ assertEquals(3, entities.size());
+ if (i == 0) {
+ verifyEntityInfo(entityId1, entityType1,
+ (i == 0 ? events1 : null),
+ (i == 0 ? EMPTY_REL_ENTITIES : null),
+ (i == 0 || i == 2 ? primaryFilters : null),
+ (i == 0 || i == 3 ? otherInfo : null), entities.get(0), domainId1);
+ verifyEntityInfo(entityId1b, entityType1,
+ (i == 0 ? events1 : null),
+ (i == 0 ? EMPTY_REL_ENTITIES : null),
+ (i == 0 || i == 2 ? primaryFilters : null),
+ (i == 0 || i == 3 ? otherInfo : null), entities.get(1), domainId1);
+ verifyEntityInfo(entityId6, entityType1,
+ (i == 0 ? EMPTY_EVENTS : null),
+ (i == 0 ? EMPTY_REL_ENTITIES : null),
+ (i == 0 || i == 2 ? primaryFilters : null),
+ (i == 0 || i == 3 ? otherInfo : null), entities.get(2), domainId2);
+ }
- entities = getEntitiesWithFilters("type_1", null,
- Collections.singleton(new NameValuePair("user", "none")));
- assertEquals(0, entities.size());
+ entities = getEntitiesWithFilters("type_1", null,
+ Collections.singleton(new NameValuePair("user", "none")), fields);
+ assertEquals(0, entities.size());
- entities = getEntitiesWithFilters("type_1", null, badTestingFilters);
- assertEquals(0, entities.size());
+ entities =
+ getEntitiesWithFilters("type_1", null, badTestingFilters, fields);
+ assertEquals(0, entities.size());
- entities = getEntitiesWithFilters("type_1", userFilter, badTestingFilters);
- assertEquals(0, entities.size());
+ entities =
+ getEntitiesWithFilters("type_1", userFilter, badTestingFilters, fields);
+ assertEquals(0, entities.size());
- entities = getEntitiesWithFilters("type_5", null, badTestingFilters);
- assertEquals(0, entities.size());
+ entities =
+ getEntitiesWithFilters("type_5", null, badTestingFilters, fields);
+ assertEquals(0, entities.size());
+ }
}
public void testGetEvents() throws IOException {