Posted to common-commits@hadoop.apache.org by pr...@apache.org on 2022/07/02 16:29:34 UTC

[hadoop] branch trunk updated: YARN-9403. GET /apps/{appid}/entities/YARN_APPLICATION accesses application table instead of entity table (#4516)

This is an automated email from the ASF dual-hosted git repository.

prabhujoseph pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 151bb31c470 YARN-9403. GET /apps/{appid}/entities/YARN_APPLICATION accesses application table instead of entity table (#4516)
151bb31c470 is described below

commit 151bb31c470d7d6aaab4ca78a1155f88246ee03b
Author: Ashutosh Gupta <as...@st.niituniversity.in>
AuthorDate: Sat Jul 2 17:29:28 2022 +0100

    YARN-9403. GET /apps/{appid}/entities/YARN_APPLICATION accesses application table instead of entity table (#4516)
    
    Co-authored-by: Ashutosh Gupta <as...@amazon.com>
---
 .../TestTimelineReaderWebServicesHBaseStorage.java |  16 +++
 .../reader/TimelineEntityReaderFactory.java        |  52 ++++-----
 .../reader/TimelineReaderContext.java              |  23 +++-
 .../reader/TimelineReaderWebServices.java          | 120 +++++++++++++--------
 4 files changed, 142 insertions(+), 69 deletions(-)
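
For readers skimming the diff, the behavioral change is this: a request to the generic entity endpoint GET /ws/v2/timeline/apps/{appid}/entities/{entitytype} used to be special-cased when {entitytype} was the reserved type YARN_APPLICATION and was answered from the HBase application table; with this patch such requests always read the generic entity table, while the dedicated /apps and /flows endpoints keep their special-purpose readers. A condensed sketch of the routing rule the patch introduces (the identifiers mirror the diff below; the flow-run and flow-activity branches are elided):

    // Condensed sketch of TimelineEntityReaderFactory#createEntityReader
    // after this patch: only requests NOT flagged as generic may be routed
    // to the special-purpose application/flow tables.
    if (!context.isGenericEntity()
        && TimelineEntityType.YARN_APPLICATION.matches(context.getEntityType())) {
      return new ApplicationEntityReader(context, dataToRetrieve); // application table
    }
    // Requests from /apps/{appid}/entities/{entitytype} fall through to here,
    // even when {entitytype} happens to be YARN_APPLICATION.
    return new GenericEntityReader(context, dataToRetrieve); // generic entity table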

diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
index b35b3dc8b79..271c5e5ce13 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
@@ -2610,4 +2610,20 @@ public class TestTimelineReaderWebServicesHBaseStorage
       client.destroy();
     }
   }
+
+  @Test
+  public void testGetEntityWithSystemEntityType() throws Exception {
+    Client client = createClient();
+    try {
+      URI uri = URI.create("http://localhost:" + getServerPort() + "/ws/v2/" +
+          "timeline/apps/application_1111111111_1111/" +
+          "entities/YARN_APPLICATION");
+      ClientResponse resp = getResponse(client, uri);
+      Set<TimelineEntity> entities =
+          resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
+      assertEquals(0, entities.size());
+    } finally {
+      client.destroy();
+    }
+  }
 }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReaderFactory.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityR [...]
index fa16077c2ed..77ac0ca8045 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReaderFactory.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReaderFactory.java
@@ -43,18 +43,20 @@ public final class TimelineEntityReaderFactory {
       TimelineReaderContext context, TimelineDataToRetrieve dataToRetrieve) {
     // currently the types that are handled separate from the generic entity
     // table are application, flow run, and flow activity entities
-    if (TimelineEntityType.YARN_APPLICATION.matches(context.getEntityType())) {
-      return new ApplicationEntityReader(context, dataToRetrieve);
-    } else if (TimelineEntityType.
-        YARN_FLOW_RUN.matches(context.getEntityType())) {
-      return new FlowRunEntityReader(context, dataToRetrieve);
-    } else if (TimelineEntityType.
-        YARN_FLOW_ACTIVITY.matches(context.getEntityType())) {
-      return new FlowActivityEntityReader(context, dataToRetrieve);
-    } else {
-      // assume we're dealing with a generic entity read
-      return new GenericEntityReader(context, dataToRetrieve);
+    if (!context.isGenericEntity()) {
+      if (TimelineEntityType.
+          YARN_APPLICATION.matches(context.getEntityType())) {
+        return new ApplicationEntityReader(context, dataToRetrieve);
+      } else if (TimelineEntityType.
+          YARN_FLOW_RUN.matches(context.getEntityType())) {
+        return new FlowRunEntityReader(context, dataToRetrieve);
+      } else if (TimelineEntityType.
+          YARN_FLOW_ACTIVITY.matches(context.getEntityType())) {
+        return new FlowActivityEntityReader(context, dataToRetrieve);
+      }
     }
+    // assume we're dealing with a generic entity read
+    return new GenericEntityReader(context, dataToRetrieve);
   }
 
   /**
@@ -73,21 +75,23 @@ public final class TimelineEntityReaderFactory {
       TimelineDataToRetrieve dataToRetrieve) {
     // currently the types that are handled separate from the generic entity
     // table are application, flow run, and flow activity entities
-    if (TimelineEntityType.YARN_APPLICATION.matches(context.getEntityType())) {
-      return new ApplicationEntityReader(context, filters, dataToRetrieve);
-    } else if (TimelineEntityType.
-        YARN_FLOW_ACTIVITY.matches(context.getEntityType())) {
-      return new FlowActivityEntityReader(context, filters, dataToRetrieve);
-    } else if (TimelineEntityType.
-        YARN_FLOW_RUN.matches(context.getEntityType())) {
-      return new FlowRunEntityReader(context, filters, dataToRetrieve);
-    } else {
-      if (context.getDoAsUser() != null) {
-        return new SubApplicationEntityReader(context, filters, dataToRetrieve);
+    if (!context.isGenericEntity()) {
+      if (TimelineEntityType.
+          YARN_APPLICATION.matches(context.getEntityType())) {
+        return new ApplicationEntityReader(context, filters, dataToRetrieve);
+      } else if (TimelineEntityType.
+          YARN_FLOW_ACTIVITY.matches(context.getEntityType())) {
+        return new FlowActivityEntityReader(context, filters, dataToRetrieve);
+      } else if (TimelineEntityType.
+          YARN_FLOW_RUN.matches(context.getEntityType())) {
+        return new FlowRunEntityReader(context, filters, dataToRetrieve);
       }
-      // assume we're dealing with a generic entity read
-      return new GenericEntityReader(context, filters, dataToRetrieve);
     }
+    if (context.getDoAsUser() != null) {
+      return new SubApplicationEntityReader(context, filters, dataToRetrieve);
+    }
+    // assume we're dealing with a generic entity read
+    return new GenericEntityReader(context, filters, dataToRetrieve);
   }
 
   /**
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderContext.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderContext.java
index 67c3d297e11..62e39d2923b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderContext.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderContext.java
@@ -33,6 +33,8 @@ public class TimelineReaderContext extends TimelineContext {
   private String entityId;
   private Long entityIdPrefix;
   private String doAsUser;
+  private boolean genericEntity = false;
+
   public TimelineReaderContext(String clusterId, String userId, String flowName,
       Long flowRunId, String appId, String entityType, String entityId) {
     super(clusterId, userId, flowName, flowRunId, appId);
@@ -55,10 +57,19 @@ public class TimelineReaderContext extends TimelineContext {
     this.doAsUser = doasUser;
   }
 
+  public TimelineReaderContext(String clusterId, String userId, String flowName,
+      Long flowRunId, String appId, String entityType, Long entityIdPrefix,
+      String entityId, String doasUser, boolean genericEntity) {
+    this(clusterId, userId, flowName, flowRunId, appId, entityType,
+        entityIdPrefix, entityId, doasUser);
+    this.genericEntity = genericEntity;
+  }
+
   public TimelineReaderContext(TimelineReaderContext other) {
     this(other.getClusterId(), other.getUserId(), other.getFlowName(),
         other.getFlowRunId(), other.getAppId(), other.getEntityType(),
-        other.getEntityIdPrefix(), other.getEntityId(), other.getDoAsUser());
+        other.getEntityIdPrefix(), other.getEntityId(), other.getDoAsUser(),
+        other.genericEntity);
   }
 
   @Override
@@ -130,4 +141,14 @@ public class TimelineReaderContext extends TimelineContext {
   public void setDoAsUser(String doAsUser) {
     this.doAsUser = doAsUser;
   }
+
+  public boolean isGenericEntity() {
+    return genericEntity;
+  }
+
+  public void setGenericEntity(boolean genericEntity) {
+    this.genericEntity = genericEntity;
+  }
+
+
 }
\ No newline at end of file
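
The genericEntity flag added above is what carries the "this request came from a generic /entities endpoint" signal from the web service layer down to the reader factory, and the copy constructor now propagates it. A minimal illustration of the new ten-argument constructor and accessor, with hypothetical argument values standing in for real request parameters:

    // Hypothetical values; the constructor and accessors are the ones added
    // by this patch.
    TimelineReaderContext context = new TimelineReaderContext(
        "yarn-cluster", "user1", "flow1", 1L,
        "application_1111111111_1111", "YARN_APPLICATION",
        null /* entityIdPrefix */, null /* entityId */,
        null /* doAsUser */, true /* genericEntity */);
    // With the flag set, TimelineEntityReaderFactory chooses the
    // GenericEntityReader (entity table) despite the YARN_APPLICATION type.
    boolean generic = context.isGenericEntity(); // true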
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
index a024b679dda..8d3436374aa 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
@@ -371,6 +371,7 @@ public class TimelineReaderWebServices {
       }
       context.setEntityType(
           TimelineReaderWebServicesUtils.parseStr(entityType));
+      context.setGenericEntity(true);
       entities = timelineReaderManager.getEntities(context,
           TimelineReaderWebServicesUtils.createTimelineEntityFilters(
           limit, createdTimeStart, createdTimeEnd, relatesTo, isRelatedTo,
@@ -516,7 +517,7 @@ public class TimelineReaderWebServices {
         flowRunId, limit, createdTimeStart, createdTimeEnd, relatesTo,
         isRelatedTo, infofilters, conffilters, metricfilters, eventfilters,
         confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
-        metricsTimeStart, metricsTimeEnd, fromId);
+        metricsTimeStart, metricsTimeEnd, fromId, true);
   }
 
   /**
@@ -636,6 +637,23 @@ public class TimelineReaderWebServices {
       @QueryParam("metricstimestart") String metricsTimeStart,
       @QueryParam("metricstimeend") String metricsTimeEnd,
       @QueryParam("fromid") String fromId) {
+    return getEntities(req, res, null, appId, entityType, userId, flowName,
+        flowRunId, limit, createdTimeStart, createdTimeEnd, relatesTo,
+        isRelatedTo, infofilters, conffilters, metricfilters, eventfilters,
+        confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
+        metricsTimeStart, metricsTimeEnd, fromId, true);
+  }
+
+  public Set<TimelineEntity> getEntities(HttpServletRequest req,
+      HttpServletResponse res, String clusterId, String appId,
+      String entityType, String userId, String flowName,
+      String flowRunId, String limit, String createdTimeStart,
+      String createdTimeEnd, String relatesTo, String isRelatedTo,
+      String infofilters, String conffilters, String metricfilters,
+      String eventfilters, String confsToRetrieve, String metricsToRetrieve,
+      String fields, String metricsLimit, String metricsTimeStart,
+      String metricsTimeEnd, String fromId,
+      boolean genericEntity) {
     String url = req.getRequestURI() +
         (req.getQueryString() == null ? "" :
             QUERY_STRING_SEP + req.getQueryString());
@@ -652,6 +670,7 @@ public class TimelineReaderWebServices {
       TimelineReaderContext context = TimelineReaderWebServicesUtils
           .createTimelineReaderContext(clusterId, userId, flowName, flowRunId,
               appId, entityType, null, null);
+      context.setGenericEntity(genericEntity);
       entities = timelineReaderManager.getEntities(context,
           TimelineReaderWebServicesUtils
               .createTimelineEntityFilters(limit, createdTimeStart,
@@ -777,6 +796,54 @@ public class TimelineReaderWebServices {
     return entity;
   }
 
+  public TimelineEntity getEntity(HttpServletRequest req,
+      HttpServletResponse res, String clusterId, String appId,
+      String entityType, String entityId, String userId, String flowName,
+      String flowRunId, String confsToRetrieve, String metricsToRetrieve,
+      String fields, String metricsLimit, String metricsTimeStart,
+      String metricsTimeEnd, String entityIdPrefix,
+      boolean genericEntity) {
+    String url = req.getRequestURI() +
+        (req.getQueryString() == null ? "" :
+            QUERY_STRING_SEP + req.getQueryString());
+    UserGroupInformation callerUGI =
+        TimelineReaderWebServicesUtils.getUser(req);
+    LOG.info("Received URL " + url + " from user " +
+        TimelineReaderWebServicesUtils.getUserName(callerUGI));
+    long startTime = Time.monotonicNow();
+    boolean succeeded = false;
+    init(res);
+    TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
+    TimelineEntity entity = null;
+    TimelineReaderContext context = TimelineReaderWebServicesUtils.
+        createTimelineReaderContext(clusterId, userId, flowName, flowRunId,
+        appId, entityType, entityIdPrefix, entityId);
+    context.setGenericEntity(genericEntity);
+    try {
+      entity = timelineReaderManager.getEntity(context,
+          TimelineReaderWebServicesUtils.createTimelineDataToRetrieve(
+          confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
+          metricsTimeStart, metricsTimeEnd));
+      checkAccessForGenericEntity(entity, callerUGI);
+      succeeded = true;
+    } catch (Exception e) {
+      handleException(e, url, startTime, "Either flowrunid or metricslimit or"
+              + " metricstime start/end");
+    } finally {
+      long latency = Time.monotonicNow() - startTime;
+      METRICS.addGetEntitiesLatency(latency, succeeded);
+      LOG.info("Processed URL " + url +
+          " (Took " + latency + " ms.)");
+    }
+    if (entity == null) {
+      LOG.info("Processed URL " + url + " but entity not found" + " (Took " +
+          (Time.monotonicNow() - startTime) + " ms.)");
+      throw new NotFoundException("Timeline entity {id: " + entityId +
+          ", type: " + entityType + " } is not found");
+    }
+    return entity;
+  }
+
   /**
    * Return a single entity of the given entity type and Id. Cluster ID is not
    * provided by client so default cluster ID has to be taken. If userid, flow
@@ -853,7 +920,7 @@ public class TimelineReaderWebServices {
       @QueryParam("entityidprefix") String entityIdPrefix) {
     return getEntity(req, res, null, appId, entityType, entityId, userId,
         flowName, flowRunId, confsToRetrieve, metricsToRetrieve, fields,
-        metricsLimit, metricsTimeStart, metricsTimeEnd, entityIdPrefix);
+        metricsLimit, metricsTimeStart, metricsTimeEnd, entityIdPrefix, true);
   }
 
   /**
@@ -932,44 +999,9 @@ public class TimelineReaderWebServices {
       @QueryParam("metricstimestart") String metricsTimeStart,
       @QueryParam("metricstimeend") String metricsTimeEnd,
       @QueryParam("entityidprefix") String entityIdPrefix) {
-    String url = req.getRequestURI() +
-        (req.getQueryString() == null ? "" :
-            QUERY_STRING_SEP + req.getQueryString());
-    UserGroupInformation callerUGI =
-        TimelineReaderWebServicesUtils.getUser(req);
-    LOG.info("Received URL " + url + " from user " +
-        TimelineReaderWebServicesUtils.getUserName(callerUGI));
-    long startTime = Time.monotonicNow();
-    boolean succeeded = false;
-    init(res);
-    TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
-    TimelineEntity entity = null;
-    try {
-      entity = timelineReaderManager.getEntity(
-          TimelineReaderWebServicesUtils.createTimelineReaderContext(
-              clusterId, userId, flowName, flowRunId, appId, entityType,
-              entityIdPrefix, entityId),
-          TimelineReaderWebServicesUtils.createTimelineDataToRetrieve(
-          confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
-          metricsTimeStart, metricsTimeEnd));
-      checkAccessForGenericEntity(entity, callerUGI);
-      succeeded = true;
-    } catch (Exception e) {
-      handleException(e, url, startTime, "Either flowrunid or metricslimit or"
-          + " metricstime start/end");
-    } finally {
-      long latency = Time.monotonicNow() - startTime;
-      METRICS.addGetEntitiesLatency(latency, succeeded);
-      LOG.info("Processed URL " + url +
-          " (Took " + latency + " ms.)");
-    }
-    if (entity == null) {
-      LOG.info("Processed URL " + url + " but entity not found" + " (Took " +
-          (Time.monotonicNow() - startTime) + " ms.)");
-      throw new NotFoundException("Timeline entity {id: " + entityId +
-          ", type: " + entityType + " } is not found");
-    }
-    return entity;
+    return getEntity(req, res, clusterId, appId, entityType, entityId,
+        userId, flowName, flowRunId, confsToRetrieve, metricsToRetrieve, fields,
+        metricsLimit, metricsTimeStart, metricsTimeEnd, entityIdPrefix, true);
   }
 
   /**
@@ -2088,7 +2120,7 @@ public class TimelineReaderWebServices {
         flowRunId, limit, createdTimeStart, createdTimeEnd, relatesTo,
         isRelatedTo, infofilters, conffilters, metricfilters, eventfilters,
         confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
-        metricsTimeStart, metricsTimeEnd, fromId);
+        metricsTimeStart, metricsTimeEnd, fromId, false);
   }
 
   /**
@@ -2202,7 +2234,7 @@ public class TimelineReaderWebServices {
         flowRunId, limit, createdTimeStart, createdTimeEnd, relatesTo,
         isRelatedTo, infofilters, conffilters, metricfilters, eventfilters,
         confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
-        metricsTimeStart, metricsTimeEnd, fromId);
+        metricsTimeStart, metricsTimeEnd, fromId, false);
   }
 
   /**
@@ -2310,7 +2342,7 @@ public class TimelineReaderWebServices {
         null, limit, createdTimeStart, createdTimeEnd, relatesTo, isRelatedTo,
         infofilters, conffilters, metricfilters, eventfilters,
         confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
-        metricsTimeStart, metricsTimeEnd, fromId);
+        metricsTimeStart, metricsTimeEnd, fromId, false);
   }
 
   /**
@@ -2420,7 +2452,7 @@ public class TimelineReaderWebServices {
         null, limit, createdTimeStart, createdTimeEnd, relatesTo, isRelatedTo,
         infofilters, conffilters, metricfilters, eventfilters,
         confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
-        metricsTimeStart, metricsTimeEnd, fromId);
+        metricsTimeStart, metricsTimeEnd, fromId, false);
   }
 
   /**


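For manual verification against a live timeline reader, the same request that the new test issues can be sent with a Jersey 1.x style client (imports: com.sun.jersey.api.client.Client, ClientResponse, GenericType; javax.ws.rs.core.MediaType; java.util.Set; the timeline service's TimelineEntity). When no entities of type YARN_APPLICATION have been written to the entity table, the expected result is an empty set; the old code would have answered from the application table. Host and port below are placeholders, as the reader webapp address is deployment-specific:

    // Mirrors the new test: query the generic entity endpoint for the
    // reserved YARN_APPLICATION type. Placeholder host/port.
    Client client = Client.create();
    ClientResponse resp = client
        .resource("http://localhost:8188/ws/v2/timeline/apps/" +
            "application_1111111111_1111/entities/YARN_APPLICATION")
        .accept(MediaType.APPLICATION_JSON)
        .get(ClientResponse.class);
    Set<TimelineEntity> entities =
        resp.getEntity(new GenericType<Set<TimelineEntity>>() { });
    System.out.println(entities.size()); // 0 when the entity table holds no such entities
    client.destroy();
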
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org