Posted to common-commits@hadoop.apache.org by sj...@apache.org on 2016/02/08 21:18:48 UTC

[1/4] hadoop git commit: YARN-4409. Fix javadoc and checkstyle issues in timelineservice code (Varun Saxena via sjlee)

Repository: hadoop
Updated Branches:
  refs/heads/YARN-2928 10a4f8ae6 -> db76a3ad0


http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnFamily.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnFamily.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnFamily.java
index d991b42..f9eb5b4 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnFamily.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnFamily.java
@@ -24,7 +24,8 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator;
 /**
  * Represents the flow run table column families.
  */
-public enum FlowActivityColumnFamily implements ColumnFamily<FlowActivityTable> {
+public enum FlowActivityColumnFamily
+    implements ColumnFamily<FlowActivityTable> {
 
   /**
    * Info column family houses known columns, specifically ones included in

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java
index 21ddcc2..a5933da 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java
@@ -31,12 +31,13 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineStor
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator;
 
 /**
- * Identifies partially qualified columns for the {@link FlowActivityTable}
+ * Identifies partially qualified columns for the {@link FlowActivityTable}.
  */
-public enum FlowActivityColumnPrefix implements ColumnPrefix<FlowActivityTable> {
+public enum FlowActivityColumnPrefix
+    implements ColumnPrefix<FlowActivityTable> {
 
   /**
-   * To store run ids of the flows
+   * To store run ids of the flows.
    */
   RUN_ID(FlowActivityColumnFamily.INFO, "r", null);
 
@@ -162,8 +163,8 @@ public enum FlowActivityColumnPrefix implements ColumnPrefix<FlowActivityTable>
    * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix
    * #readResultsWithTimestamps(org.apache.hadoop.hbase.client.Result)
    */
-  public <T> NavigableMap<String, NavigableMap<Long, T>> readResultsWithTimestamps(
-      Result result) throws IOException {
+  public <T> NavigableMap<String, NavigableMap<Long, T>>
+      readResultsWithTimestamps(Result result) throws IOException {
     return column.readResultsWithTimestamps(result, columnPrefixBytes);
   }
 
@@ -179,8 +180,8 @@ public enum FlowActivityColumnPrefix implements ColumnPrefix<FlowActivityTable>
   public static final FlowActivityColumnPrefix columnFor(String columnPrefix) {
 
     // Match column based on value, assume column family matches.
-    for (FlowActivityColumnPrefix flowActivityColPrefix : FlowActivityColumnPrefix
-        .values()) {
+    for (FlowActivityColumnPrefix flowActivityColPrefix :
+        FlowActivityColumnPrefix.values()) {
       // Find a match based only on name.
       if (flowActivityColPrefix.getColumnPrefix().equals(columnPrefix)) {
         return flowActivityColPrefix;
@@ -209,8 +210,8 @@ public enum FlowActivityColumnPrefix implements ColumnPrefix<FlowActivityTable>
     // TODO: needs unit test to confirm and need to update javadoc to explain
     // null prefix case.
 
-    for (FlowActivityColumnPrefix flowActivityColumnPrefix : FlowActivityColumnPrefix
-        .values()) {
+    for (FlowActivityColumnPrefix flowActivityColumnPrefix :
+        FlowActivityColumnPrefix.values()) {
       // Find a match based column family and on name.
       if (flowActivityColumnPrefix.columnFamily.equals(columnFamily)
           && (((columnPrefix == null) && (flowActivityColumnPrefix

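The columnFor hunks above reformat a common enum-lookup idiom: iterate over values() and match a constant by its prefix name. A minimal standalone sketch of that pattern, using a hypothetical ColumnPrefixExample enum rather than the real timelineservice classes:

/** Hypothetical stand-in for an enum such as FlowActivityColumnPrefix. */
enum ColumnPrefixExample {
  RUN_ID("r");

  private final String columnPrefix;

  ColumnPrefixExample(String columnPrefix) {
    this.columnPrefix = columnPrefix;
  }

  String getColumnPrefix() {
    return columnPrefix;
  }

  /** Match a constant based only on its prefix name, as columnFor() does. */
  static ColumnPrefixExample columnFor(String columnPrefix) {
    for (ColumnPrefixExample prefix : ColumnPrefixExample.values()) {
      if (prefix.getColumnPrefix().equals(columnPrefix)) {
        return prefix;
      }
    }
    // No match; the real method's fallback behaviour is not shown in the hunk.
    return null;
  }
}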
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKey.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKey.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKey.java
index a9598ef..80b3287 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKey.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityRowKey.java
@@ -57,9 +57,9 @@ public class FlowActivityRowKey {
 
   /**
    * Constructs a row key prefix for the flow activity table as follows:
-   * {@code clusterId!}
+   * {@code clusterId!}.
    *
-   * @param clusterId
+   * @param clusterId Cluster Id.
    * @return byte array with the row key prefix
    */
   public static byte[] getRowKeyPrefix(String clusterId) {
@@ -68,10 +68,10 @@ public class FlowActivityRowKey {
 
   /**
    * Constructs a row key prefix for the flow activity table as follows:
-   * {@code clusterId!dayTimestamp!}
+   * {@code clusterId!dayTimestamp!}.
    *
-   * @param clusterId
-   * @param dayTs
+   * @param clusterId Cluster Id.
+   * @param dayTs Start of the day timestamp.
    * @return byte array with the row key prefix
    */
   public static byte[] getRowKeyPrefix(String clusterId, long dayTs) {
@@ -82,12 +82,13 @@ public class FlowActivityRowKey {
 
   /**
    * Constructs a row key for the flow activity table as follows:
-   * {@code clusterId!dayTimestamp!user!flowName}
+   * {@code clusterId!dayTimestamp!user!flowName}.
+   * Will insert into current day's record in the table. Uses current time to
+   * store top of the day timestamp.
    *
-   * Will insert into current day's record in the table
-   * @param clusterId
-   * @param userId
-   * @param flowName
+   * @param clusterId Cluster Id.
+   * @param userId User Id.
+   * @param flowName Flow Name.
    * @return byte array with the row key prefix
    */
   public static byte[] getRowKey(String clusterId, String userId,
@@ -99,12 +100,12 @@ public class FlowActivityRowKey {
 
   /**
    * Constructs a row key for the flow activity table as follows:
-   * {@code clusterId!dayTimestamp!user!flowName}
+   * {@code clusterId!dayTimestamp!user!flowName}.
    *
-   * @param clusterId
-   * @param dayTs
-   * @param userId
-   * @param flowName
+   * @param clusterId Cluster Id.
+   * @param dayTs Top of the day timestamp.
+   * @param userId User Id.
+   * @param flowName Flow Name.
    * @return byte array for the row key
    */
   public static byte[] getRowKey(String clusterId, long dayTs, String userId,
@@ -118,6 +119,9 @@ public class FlowActivityRowKey {
 
   /**
    * Given the raw row key as bytes, returns the row key as an object.
+   *
+   * @param rowKey Byte representation of row key.
+   * @return A <cite>FlowActivityRowKey</cite> object.
    */
   public static FlowActivityRowKey parseRowKey(byte[] rowKey) {
     byte[][] rowKeyComponents = Separator.QUALIFIERS.split(rowKey);

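The javadoc added above documents the flow activity row key layout, {@code clusterId!dayTimestamp!user!flowName}. A simplified sketch of building and parsing such a compound key follows; the real code uses Separator.QUALIFIERS and byte-level encoding helpers rather than plain string concatenation, so treat this as an illustration of the key shape only:

import java.nio.charset.StandardCharsets;

/** Simplified illustration of a "clusterId!dayTimestamp!user!flowName" key. */
final class RowKeySketch {
  private static final String SEPARATOR = "!"; // stand-in for Separator.QUALIFIERS

  static byte[] getRowKey(String clusterId, long dayTs, String userId,
      String flowName) {
    String key = clusterId + SEPARATOR + dayTs + SEPARATOR + userId
        + SEPARATOR + flowName;
    return key.getBytes(StandardCharsets.UTF_8);
  }

  /** Mirrors parseRowKey(): split the raw bytes back into components. */
  static String[] parseRowKey(byte[] rowKey) {
    return new String(rowKey, StandardCharsets.UTF_8).split(SEPARATOR);
  }
}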
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTable.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTable.java
index 315281f..8a0430c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTable.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityTable.java
@@ -38,7 +38,7 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
  *
  * Example flow activity table record:
  *
- * </pre>
+ * <pre>
  * |-------------------------------------------|
  * |  Row key   | Column Family                |
  * |            | info                         |
@@ -52,19 +52,20 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
  * </pre>
  */
 public class FlowActivityTable extends BaseTable<FlowActivityTable> {
-  /** flow activity table prefix */
+  /** flow activity table prefix. */
   private static final String PREFIX =
       YarnConfiguration.TIMELINE_SERVICE_PREFIX + ".flowactivity";
 
-  /** config param name that specifies the flowactivity table name */
+  /** config param name that specifies the flowactivity table name. */
   public static final String TABLE_NAME_CONF_NAME = PREFIX + ".table.name";
 
-  /** default value for flowactivity table name */
-  public static final String DEFAULT_TABLE_NAME = "timelineservice.flowactivity";
+  /** default value for flowactivity table name. */
+  public static final String DEFAULT_TABLE_NAME =
+      "timelineservice.flowactivity";
 
   private static final Log LOG = LogFactory.getLog(FlowActivityTable.class);
 
-  /** default max number of versions */
+  /** default max number of versions. */
   public static final int DEFAULT_METRICS_MAX_VERSIONS = Integer.MAX_VALUE;
 
   public FlowActivityTable() {
@@ -91,16 +92,16 @@ public class FlowActivityTable extends BaseTable<FlowActivityTable> {
           + " already exists.");
     }
 
-    HTableDescriptor FlowActivityTableDescp = new HTableDescriptor(table);
+    HTableDescriptor flowActivityTableDescp = new HTableDescriptor(table);
     HColumnDescriptor infoCF =
         new HColumnDescriptor(FlowActivityColumnFamily.INFO.getBytes());
     infoCF.setBloomFilterType(BloomType.ROWCOL);
-    FlowActivityTableDescp.addFamily(infoCF);
+    flowActivityTableDescp.addFamily(infoCF);
     infoCF.setMinVersions(1);
     infoCF.setMaxVersions(DEFAULT_METRICS_MAX_VERSIONS);
 
     // TODO: figure the split policy before running in production
-    admin.createTable(FlowActivityTableDescp);
+    admin.createTable(flowActivityTableDescp);
     LOG.info("Status of table creation for " + table.getNameAsString() + "="
         + admin.tableExists(table));
   }
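
For reference, the createTable flow touched by this hunk follows the usual HBase 1.x admin pattern: describe the table, configure the column family, then create it. A hedged sketch with a hypothetical table and family name (the real code takes these from YarnConfiguration and FlowActivityColumnFamily):

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.util.Bytes;

final class CreateTableSketch {
  static void createTable(Connection conn) throws java.io.IOException {
    try (Admin admin = conn.getAdmin()) {
      TableName table = TableName.valueOf("timelineservice.flowactivity");
      HTableDescriptor desc = new HTableDescriptor(table);
      // "i" is a hypothetical stand-in for the INFO column family bytes.
      HColumnDescriptor infoCF = new HColumnDescriptor(Bytes.toBytes("i"));
      infoCF.setBloomFilterType(BloomType.ROWCOL);
      infoCF.setMinVersions(1);
      infoCF.setMaxVersions(Integer.MAX_VALUE);
      desc.addFamily(infoCF);
      admin.createTable(desc);
    }
  }
}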

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumnPrefix.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumnPrefix.java
index e3bb52d..3d7c40e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumnPrefix.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumnPrefix.java
@@ -194,8 +194,8 @@ public enum FlowRunColumnPrefix implements ColumnPrefix<FlowRunTable> {
    * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix
    * #readResultsWithTimestamps(org.apache.hadoop.hbase.client.Result)
    */
-  public <T> NavigableMap<String, NavigableMap<Long, T>> readResultsWithTimestamps(
-      Result result) throws IOException {
+  public <T> NavigableMap<String, NavigableMap<Long, T>>
+      readResultsWithTimestamps(Result result) throws IOException {
     return column.readResultsWithTimestamps(result, columnPrefixBytes);
   }
 
@@ -248,8 +248,8 @@ public enum FlowRunColumnPrefix implements ColumnPrefix<FlowRunTable> {
     for (FlowRunColumnPrefix frcp : FlowRunColumnPrefix.values()) {
       // Find a match based column family and on name.
       if (frcp.columnFamily.equals(columnFamily)
-          && (((columnPrefix == null) && (frcp.getColumnPrefix() == null)) || (frcp
-              .getColumnPrefix().equals(columnPrefix)))) {
+          && (((columnPrefix == null) && (frcp.getColumnPrefix() == null)) ||
+          (frcp.getColumnPrefix().equals(columnPrefix)))) {
         return frcp;
       }
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunCoprocessor.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunCoprocessor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunCoprocessor.java
index 1984157..9698f06 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunCoprocessor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunCoprocessor.java
@@ -46,6 +46,9 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineStorageUtils;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimestampGenerator;
 
+/**
+ * Coprocessor for flow run table.
+ */
 public class FlowRunCoprocessor extends BaseRegionObserver {
 
   @SuppressWarnings("unused")
@@ -53,9 +56,10 @@ public class FlowRunCoprocessor extends BaseRegionObserver {
 
   private HRegion region;
   /**
-   * generate a timestamp that is unique per row in a region this is per region
+   * generate a timestamp that is unique per row in a region this is per region.
    */
-  private final TimestampGenerator timestampGenerator = new TimestampGenerator();
+  private final TimestampGenerator timestampGenerator =
+      new TimestampGenerator();
 
   @Override
   public void start(CoprocessorEnvironment e) throws IOException {

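The coprocessor javadoc above refers to a TimestampGenerator that hands out timestamps unique per row within a region. A hypothetical sketch of one way to produce unique, monotonically increasing timestamps (not necessarily the scheme the real class uses):

import java.util.concurrent.atomic.AtomicLong;

/** Hands out strictly increasing timestamps, never falling behind the clock. */
class UniqueTimestampSketch {
  private final AtomicLong last = new AtomicLong(0);

  long nextTimestamp() {
    long now = System.currentTimeMillis();
    return last.updateAndGet(prev -> Math.max(prev + 1, now));
  }
}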
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKey.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKey.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKey.java
index 2cd9625..0585dc9 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKey.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKey.java
@@ -56,11 +56,11 @@ public class FlowRunRowKey {
 
   /**
    * Constructs a row key prefix for the flow run table as follows: {
-   * clusterId!userI!flowName!}
+   * clusterId!userI!flowName!}.
    *
-   * @param clusterId
-   * @param userId
-   * @param flowName
+   * @param clusterId Cluster Id.
+   * @param userId User Id.
+   * @param flowName Flow Name.
    * @return byte array with the row key prefix
    */
   public static byte[] getRowKeyPrefix(String clusterId, String userId,
@@ -71,12 +71,12 @@ public class FlowRunRowKey {
 
   /**
    * Constructs a row key for the entity table as follows: {
-   * clusterId!userI!flowName!Inverted Flow Run Id}
+   * clusterId!userI!flowName!Inverted Flow Run Id}.
    *
-   * @param clusterId
-   * @param userId
-   * @param flowName
-   * @param flowRunId
+   * @param clusterId Cluster Id.
+   * @param userId User Id.
+   * @param flowName Flow Name.
+   * @param flowRunId Run Id for the flow name.
    * @return byte array with the row key
    */
   public static byte[] getRowKey(String clusterId, String userId,
@@ -91,6 +91,9 @@ public class FlowRunRowKey {
 
   /**
    * Given the raw row key as bytes, returns the row key as an object.
+   *
+   * @param rowKey Byte representation of row key.
+   * @return A <cite>FlowRunRowKey</cite> object.
    */
   public static FlowRunRowKey parseRowKey(byte[] rowKey) {
     byte[][] rowKeyComponents = Separator.QUALIFIERS.split(rowKey);

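The FlowRunRowKey javadoc above mentions an "Inverted Flow Run Id", so that newer runs sort first under HBase's lexicographic key ordering. One common way to get that ordering is to store Long.MAX_VALUE minus the value; a hedged sketch (the exact encoding used by the real TimelineStorageUtils helpers may differ):

import org.apache.hadoop.hbase.util.Bytes;

final class InvertedIdSketch {
  /**
   * Larger run ids produce lexicographically smaller keys, so the most
   * recent run sorts first in a scan.
   */
  static byte[] invertedRunId(long flowRunId) {
    return Bytes.toBytes(Long.MAX_VALUE - flowRunId);
  }
}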
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java
index 2682fea..547bef0 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java
@@ -84,19 +84,19 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
  * </pre>
  */
 public class FlowRunTable extends BaseTable<FlowRunTable> {
-  /** entity prefix */
+  /** entity prefix. */
   private static final String PREFIX =
       YarnConfiguration.TIMELINE_SERVICE_PREFIX + ".flowrun";
 
-  /** config param name that specifies the flowrun table name */
+  /** config param name that specifies the flowrun table name. */
   public static final String TABLE_NAME_CONF_NAME = PREFIX + ".table.name";
 
-  /** default value for flowrun table name */
+  /** default value for flowrun table name. */
   public static final String DEFAULT_TABLE_NAME = "timelineservice.flowrun";
 
   private static final Log LOG = LogFactory.getLog(FlowRunTable.class);
 
-  /** default max number of versions */
+  /** default max number of versions. */
   public static final int DEFAULT_METRICS_MAX_VERSIONS = Integer.MAX_VALUE;
 
   public FlowRunTable() {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowScanner.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowScanner.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowScanner.java
index d541df0..6fefd15 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowScanner.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowScanner.java
@@ -90,8 +90,8 @@ class FlowScanner implements RegionScanner, Closeable {
   }
 
   @Override
-  public boolean nextRaw(List<Cell> cells, int limit) throws IOException {
-    return nextInternal(cells, limit);
+  public boolean nextRaw(List<Cell> cells, int cellLimit) throws IOException {
+    return nextInternal(cells, cellLimit);
   }
 
   @Override
@@ -100,8 +100,8 @@ class FlowScanner implements RegionScanner, Closeable {
   }
 
   @Override
-  public boolean next(List<Cell> cells, int limit) throws IOException {
-    return nextInternal(cells, limit);
+  public boolean next(List<Cell> cells, int cellLimit) throws IOException {
+    return nextInternal(cells, cellLimit);
   }
 
   private String getAggregationCompactionDimension(List<Tag> tags) {
@@ -161,11 +161,12 @@ class FlowScanner implements RegionScanner, Closeable {
    * column or returns the cell as is.
    *
    * @param cells
-   * @param limit
+   * @param cellLimit
    * @return true if next row is available for the scanner, false otherwise
    * @throws IOException
    */
-  private boolean nextInternal(List<Cell> cells, int limit) throws IOException {
+  private boolean nextInternal(List<Cell> cells, int cellLimit)
+      throws IOException {
     Cell cell = null;
     startNext();
     // Loop through all the cells in this row
@@ -183,8 +184,8 @@ class FlowScanner implements RegionScanner, Closeable {
     Set<String> alreadySeenAggDim = new HashSet<>();
     int addedCnt = 0;
     ValueConverter converter = null;
-    while (((cell = peekAtNextCell(limit)) != null)
-        && (limit <= 0 || addedCnt < limit)) {
+    while (((cell = peekAtNextCell(cellLimit)) != null)
+        && (cellLimit <= 0 || addedCnt < cellLimit)) {
       byte[] newColumnQualifier = CellUtil.cloneQualifier(cell);
       if (comp.compare(currentColumnQualifier, newColumnQualifier) != 0) {
         if (converter != null && isNumericConverter(converter)) {
@@ -198,12 +199,12 @@ class FlowScanner implements RegionScanner, Closeable {
       }
       // No operation needs to be performed on non numeric converters.
       if (!isNumericConverter(converter)) {
-        nextCell(limit);
+        nextCell(cellLimit);
         continue;
       }
       collectCells(currentColumnCells, currentAggOp, cell, alreadySeenAggDim,
           (NumericValueConverter)converter);
-      nextCell(limit);
+      nextCell(cellLimit);
     }
     if (!currentColumnCells.isEmpty()) {
       emitCells(cells, currentColumnCells, currentAggOp,
@@ -220,7 +221,7 @@ class FlowScanner implements RegionScanner, Closeable {
   }
 
   /**
-   * resets the parameters to an intialized state for next loop iteration
+   * resets the parameters to an intialized state for next loop iteration.
    *
    * @param cell
    * @param currentAggOp
@@ -278,14 +279,12 @@ class FlowScanner implements RegionScanner, Closeable {
       List<Tag> tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(),
           cell.getTagsLength());
       String aggDim = getAggregationCompactionDimension(tags);
-      if (alreadySeenAggDim.contains(aggDim)) {
-        // if this agg dimension has already been seen,
-        // since they show up in sorted order
-        // we drop the rest which are older
-        // in other words, this cell is older than previously seen cells
-        // for that agg dim
-      } else {
-        // not seen this agg dim, hence consider this cell in our working set
+
+      // If this agg dimension has already been seen, since they show up in
+      // sorted order, we drop the rest which are older. In other words, this
+      // cell is older than previously seen cells for that agg dim.
+      if (!alreadySeenAggDim.contains(aggDim)) {
+        // Not seen this agg dim, hence consider this cell in our working set
         currentColumnCells.add(cell);
         alreadySeenAggDim.add(aggDim);
       }
@@ -424,6 +423,8 @@ class FlowScanner implements RegionScanner, Closeable {
 
   /**
    * Returns whether or not the underlying scanner has more rows.
+   *
+   * @return true, if there are more cells to return, false otherwise.
    */
   public boolean hasMore() {
     return currentIndex < availableCells.size() ? true : hasMore;
@@ -434,15 +435,16 @@ class FlowScanner implements RegionScanner, Closeable {
    * pointer to the next cell. This method can be called multiple times in a row
    * to advance through all the available cells.
    *
-   * @param limit
+   * @param cellLimit
    *          the limit of number of cells to return if the next batch must be
    *          fetched by the wrapped scanner
    * @return the next available cell or null if no more cells are available for
    *         the current row
-   * @throws IOException
+   * @throws IOException if any problem is encountered while grabbing the next
+   *     cell.
    */
-  public Cell nextCell(int limit) throws IOException {
-    Cell cell = peekAtNextCell(limit);
+  public Cell nextCell(int cellLimit) throws IOException {
+    Cell cell = peekAtNextCell(cellLimit);
     if (cell != null) {
       currentIndex++;
     }
@@ -454,19 +456,20 @@ class FlowScanner implements RegionScanner, Closeable {
    * pointer. Calling this method multiple times in a row will continue to
    * return the same cell.
    *
-   * @param limit
+   * @param cellLimit
    *          the limit of number of cells to return if the next batch must be
    *          fetched by the wrapped scanner
    * @return the next available cell or null if no more cells are available for
    *         the current row
-   * @throws IOException
+   * @throws IOException if any problem is encountered while grabbing the next
+   *     cell.
    */
-  public Cell peekAtNextCell(int limit) throws IOException {
+  public Cell peekAtNextCell(int cellLimit) throws IOException {
     if (currentIndex >= availableCells.size()) {
       // done with current batch
       availableCells.clear();
       currentIndex = 0;
-      hasMore = flowRunScanner.next(availableCells, limit);
+      hasMore = flowRunScanner.next(availableCells, cellLimit);
     }
     Cell cell = null;
     if (currentIndex < availableCells.size()) {

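The peekAtNextCell()/nextCell() javadoc above describes a peek-versus-advance cursor over batches fetched from a wrapped scanner. A simplified, self-contained sketch of that contract, with hypothetical names and no HBase types:

import java.util.ArrayList;
import java.util.List;

/** Peek/next cursor over a batched source, mirroring the contract above. */
class BatchedCursor<T> {
  interface BatchSource<E> {
    /** Fills the list with the next batch; returns true if more remain. */
    boolean next(List<E> out, int limit);
  }

  private final BatchSource<T> source;
  private final List<T> available = new ArrayList<>();
  private int currentIndex = 0;
  private boolean hasMore = true;

  BatchedCursor(BatchSource<T> source) {
    this.source = source;
  }

  /** Returns the next item without advancing; null when exhausted. */
  T peek(int limit) {
    if (currentIndex >= available.size()) {
      // Done with the current batch; fetch the next one from the source.
      available.clear();
      currentIndex = 0;
      hasMore = source.next(available, limit);
    }
    return currentIndex < available.size() ? available.get(currentIndex) : null;
  }

  /** Returns the next item and advances the cursor. */
  T next(int limit) {
    T item = peek(limit);
    if (item != null) {
      currentIndex++;
    }
    return item;
  }

  boolean hasMore() {
    return currentIndex < available.size() || hasMore;
  }
}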
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java
new file mode 100644
index 0000000..04963f3
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/package-info.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Package org.apache.hadoop.yarn.server.timelineservice.storage.flow
+ * contains classes related to implementation for flow related tables, viz. flow
+ * run table and flow activity table.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+package org.apache.hadoop.yarn.server.timelineservice.storage.flow;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java
index f652ffd..e78db2a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java
@@ -16,6 +16,10 @@
  * limitations under the License.
  */
 
+/**
+ * Package org.apache.hadoop.yarn.server.timelineservice.storage contains
+ * classes which define and implement reading and writing to backend storage.
+ */
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 package org.apache.hadoop.yarn.server.timelineservice.storage;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
index 387f7d7..0de09e0 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
@@ -98,7 +98,7 @@ class ApplicationEntityReader extends GenericEntityReader {
     TimelineEntityFilters filters = getFilters();
     if (!dataToRetrieve.getFieldsToRetrieve().contains(Field.EVENTS) &&
         !dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL) &&
-        (singleEntityRead || filters.getEventFilters() == null)) {
+        (isSingleEntityRead() || filters.getEventFilters() == null)) {
       infoColFamilyList.addFilter(
           new QualifierFilter(CompareOp.NOT_EQUAL,
           new BinaryPrefixComparator(
@@ -107,7 +107,7 @@ class ApplicationEntityReader extends GenericEntityReader {
     // info not required.
     if (!dataToRetrieve.getFieldsToRetrieve().contains(Field.INFO) &&
         !dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL) &&
-        (singleEntityRead || filters.getInfoFilters() == null)) {
+        (isSingleEntityRead() || filters.getInfoFilters() == null)) {
       infoColFamilyList.addFilter(
           new QualifierFilter(CompareOp.NOT_EQUAL,
           new BinaryPrefixComparator(
@@ -116,7 +116,7 @@ class ApplicationEntityReader extends GenericEntityReader {
     // is releated to not required.
     if (!dataToRetrieve.getFieldsToRetrieve().contains(Field.IS_RELATED_TO) &&
         !dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL) &&
-        (singleEntityRead || filters.getIsRelatedTo() == null)) {
+        (isSingleEntityRead() || filters.getIsRelatedTo() == null)) {
       infoColFamilyList.addFilter(
           new QualifierFilter(CompareOp.NOT_EQUAL,
           new BinaryPrefixComparator(
@@ -125,7 +125,7 @@ class ApplicationEntityReader extends GenericEntityReader {
     // relates to not required.
     if (!dataToRetrieve.getFieldsToRetrieve().contains(Field.RELATES_TO) &&
         !dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL) &&
-        (singleEntityRead || filters.getRelatesTo() == null)) {
+        (isSingleEntityRead() || filters.getRelatesTo() == null)) {
       infoColFamilyList.addFilter(
           new QualifierFilter(CompareOp.NOT_EQUAL,
           new BinaryPrefixComparator(
@@ -133,7 +133,7 @@ class ApplicationEntityReader extends GenericEntityReader {
     }
     list.addFilter(infoColFamilyList);
     if ((dataToRetrieve.getFieldsToRetrieve().contains(Field.CONFIGS) ||
-        (!singleEntityRead && filters.getConfigFilters() != null)) ||
+        (!isSingleEntityRead() && filters.getConfigFilters() != null)) ||
         (dataToRetrieve.getConfsToRetrieve() != null &&
         !dataToRetrieve.getConfsToRetrieve().getFilterList().isEmpty())) {
       FilterList filterCfg =
@@ -148,7 +148,7 @@ class ApplicationEntityReader extends GenericEntityReader {
       list.addFilter(filterCfg);
     }
     if ((dataToRetrieve.getFieldsToRetrieve().contains(Field.METRICS) ||
-        (!singleEntityRead && filters.getMetricFilters() != null)) ||
+        (!isSingleEntityRead() && filters.getMetricFilters() != null)) ||
         (dataToRetrieve.getMetricsToRetrieve() != null &&
         !dataToRetrieve.getMetricsToRetrieve().getFilterList().isEmpty())) {
       FilterList filterMetrics =
@@ -177,7 +177,7 @@ class ApplicationEntityReader extends GenericEntityReader {
     if (filterList != null && !filterList.getFilters().isEmpty()) {
       get.setFilter(filterList);
     }
-    return table.getResult(hbaseConf, conn, get);
+    return getTable().getResult(hbaseConf, conn, get);
   }
 
   @Override
@@ -186,7 +186,7 @@ class ApplicationEntityReader extends GenericEntityReader {
         "clusterId shouldn't be null");
     Preconditions.checkNotNull(getContext().getEntityType(),
         "entityType shouldn't be null");
-    if (singleEntityRead) {
+    if (isSingleEntityRead()) {
       Preconditions.checkNotNull(getContext().getAppId(),
           "appId shouldn't be null");
     } else {
@@ -201,14 +201,14 @@ class ApplicationEntityReader extends GenericEntityReader {
   protected void augmentParams(Configuration hbaseConf, Connection conn)
       throws IOException {
     TimelineReaderContext context = getContext();
-    if (singleEntityRead) {
+    if (isSingleEntityRead()) {
       if (context.getFlowName() == null || context.getFlowRunId() == null ||
           context.getUserId() == null) {
         FlowContext flowContext = lookupFlowContext(
             context.getClusterId(), context.getAppId(), hbaseConf, conn);
-        context.setFlowName(flowContext.flowName);
-        context.setFlowRunId(flowContext.flowRunId);
-        context.setUserId(flowContext.userId);
+        context.setFlowName(flowContext.getFlowName());
+        context.setFlowRunId(flowContext.getFlowRunId());
+        context.setUserId(flowContext.getUserId());
       }
     }
     getDataToRetrieve().addFieldsBasedOnConfsAndMetricsToRetrieve();
@@ -234,7 +234,7 @@ class ApplicationEntityReader extends GenericEntityReader {
       newList.addFilter(filterList);
     }
     scan.setFilter(newList);
-    return table.getResultScanner(hbaseConf, conn, scan);
+    return getTable().getResultScanner(hbaseConf, conn, scan);
   }
 
   @Override
@@ -252,7 +252,7 @@ class ApplicationEntityReader extends GenericEntityReader {
     Number createdTime =
         (Number)ApplicationColumn.CREATED_TIME.readResult(result);
     entity.setCreatedTime(createdTime.longValue());
-    if (!singleEntityRead &&
+    if (!isSingleEntityRead() &&
         (entity.getCreatedTime() < filters.getCreatedTimeBegin() ||
         entity.getCreatedTime() > filters.getCreatedTimeEnd())) {
       return null;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java
index 96350da..0d2bdd8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowActivityEntityReader.java
@@ -111,7 +111,7 @@ class FlowActivityEntityReader extends TimelineEntityReader {
     // the scanner may still return more than the limit; therefore we need to
     // read the right number as we iterate
     scan.setFilter(new PageFilter(getFilters().getLimit()));
-    return table.getResultScanner(hbaseConf, conn, scan);
+    return getTable().getResultScanner(hbaseConf, conn, scan);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java
index 2d1c41c..743315c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/FlowRunEntityReader.java
@@ -82,7 +82,7 @@ class FlowRunEntityReader extends TimelineEntityReader {
         "userId shouldn't be null");
     Preconditions.checkNotNull(getContext().getFlowName(),
         "flowName shouldn't be null");
-    if (singleEntityRead) {
+    if (isSingleEntityRead()) {
       Preconditions.checkNotNull(getContext().getFlowRunId(),
           "flowRunId shouldn't be null");
     }
@@ -103,7 +103,7 @@ class FlowRunEntityReader extends TimelineEntityReader {
            new BinaryComparator(FlowRunColumnFamily.INFO.getBytes()));
     TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
     // Metrics not required.
-    if (!singleEntityRead &&
+    if (!isSingleEntityRead() &&
         !dataToRetrieve.getFieldsToRetrieve().contains(Field.METRICS) &&
         !dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL)) {
       FilterList infoColFamilyList = new FilterList(Operator.MUST_PASS_ONE);
@@ -137,7 +137,7 @@ class FlowRunEntityReader extends TimelineEntityReader {
     if (filterList != null && !filterList.getFilters().isEmpty()) {
       get.setFilter(filterList);
     }
-    return table.getResult(hbaseConf, conn, get);
+    return getTable().getResult(hbaseConf, conn, get);
   }
 
   @Override
@@ -154,7 +154,7 @@ class FlowRunEntityReader extends TimelineEntityReader {
       newList.addFilter(filterList);
     }
     scan.setFilter(newList);
-    return table.getResultScanner(hbaseConf, conn, scan);
+    return getTable().getResultScanner(hbaseConf, conn, scan);
   }
 
   @Override
@@ -163,7 +163,7 @@ class FlowRunEntityReader extends TimelineEntityReader {
     FlowRunEntity flowRun = new FlowRunEntity();
     flowRun.setUser(context.getUserId());
     flowRun.setName(context.getFlowName());
-    if (singleEntityRead) {
+    if (isSingleEntityRead()) {
       flowRun.setRunId(context.getFlowRunId());
     } else {
       FlowRunRowKey rowKey = FlowRunRowKey.parseRowKey(result.getRow());
@@ -175,7 +175,7 @@ class FlowRunEntityReader extends TimelineEntityReader {
     if (startTime != null) {
       flowRun.setStartTime(startTime.longValue());
     }
-    if (!singleEntityRead &&
+    if (!isSingleEntityRead() &&
         (flowRun.getStartTime() < getFilters().getCreatedTimeBegin() ||
         flowRun.getStartTime() > getFilters().getCreatedTimeEnd())) {
       return null;
@@ -194,7 +194,7 @@ class FlowRunEntityReader extends TimelineEntityReader {
     }
 
     // read metrics
-    if (singleEntityRead ||
+    if (isSingleEntityRead() ||
         getDataToRetrieve().getFieldsToRetrieve().contains(Field.METRICS)) {
       readMetrics(flowRun, result, FlowRunColumnPrefix.METRIC);
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
index 3bc2f3f..d8f73d4 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
@@ -47,8 +47,8 @@ import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilter
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
 import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterUtils;
 import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
-import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumnPrefix;
 import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumnPrefix;
 import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowColumn;
 import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowRowKey;
 import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowTable;
@@ -118,7 +118,7 @@ class GenericEntityReader extends TimelineEntityReader {
     // Events not required.
     if (!dataToRetrieve.getFieldsToRetrieve().contains(Field.EVENTS) &&
         !dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL) &&
-        (singleEntityRead || filters.getEventFilters() == null)) {
+        (isSingleEntityRead() || filters.getEventFilters() == null)) {
       infoColFamilyList.addFilter(
           new QualifierFilter(CompareOp.NOT_EQUAL,
           new BinaryPrefixComparator(
@@ -127,7 +127,7 @@ class GenericEntityReader extends TimelineEntityReader {
     // info not required.
     if (!dataToRetrieve.getFieldsToRetrieve().contains(Field.INFO) &&
         !dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL) &&
-        (singleEntityRead || filters.getInfoFilters() == null)) {
+        (isSingleEntityRead() || filters.getInfoFilters() == null)) {
       infoColFamilyList.addFilter(
           new QualifierFilter(CompareOp.NOT_EQUAL,
           new BinaryPrefixComparator(
@@ -136,7 +136,7 @@ class GenericEntityReader extends TimelineEntityReader {
     // is related to not required.
     if (!dataToRetrieve.getFieldsToRetrieve().contains(Field.IS_RELATED_TO) &&
         !dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL) &&
-        (singleEntityRead || filters.getIsRelatedTo() == null)) {
+        (isSingleEntityRead() || filters.getIsRelatedTo() == null)) {
       infoColFamilyList.addFilter(
           new QualifierFilter(CompareOp.NOT_EQUAL,
           new BinaryPrefixComparator(
@@ -145,7 +145,7 @@ class GenericEntityReader extends TimelineEntityReader {
     // relates to not required.
     if (!dataToRetrieve.getFieldsToRetrieve().contains(Field.RELATES_TO) &&
         !dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL) &&
-        (singleEntityRead || filters.getRelatesTo() == null)) {
+        (isSingleEntityRead() || filters.getRelatesTo() == null)) {
       infoColFamilyList.addFilter(
           new QualifierFilter(CompareOp.NOT_EQUAL,
           new BinaryPrefixComparator(
@@ -153,7 +153,7 @@ class GenericEntityReader extends TimelineEntityReader {
     }
     list.addFilter(infoColFamilyList);
     if ((dataToRetrieve.getFieldsToRetrieve().contains(Field.CONFIGS) ||
-        (!singleEntityRead && filters.getConfigFilters() != null)) ||
+        (!isSingleEntityRead() && filters.getConfigFilters() != null)) ||
         (dataToRetrieve.getConfsToRetrieve() != null &&
         !dataToRetrieve.getConfsToRetrieve().getFilterList().isEmpty())) {
       FilterList filterCfg =
@@ -167,7 +167,7 @@ class GenericEntityReader extends TimelineEntityReader {
       list.addFilter(filterCfg);
     }
     if ((dataToRetrieve.getFieldsToRetrieve().contains(Field.METRICS) ||
-        (!singleEntityRead && filters.getMetricFilters() != null)) ||
+        (!isSingleEntityRead() && filters.getMetricFilters() != null)) ||
         (dataToRetrieve.getMetricsToRetrieve() != null &&
         !dataToRetrieve.getMetricsToRetrieve().getFilterList().isEmpty())) {
       FilterList filterMetrics =
@@ -201,14 +201,23 @@ class GenericEntityReader extends TimelineEntityReader {
   }
 
   protected static class FlowContext {
-    protected final String userId;
-    protected final String flowName;
-    protected final Long flowRunId;
+    private final String userId;
+    private final String flowName;
+    private final Long flowRunId;
     public FlowContext(String user, String flowName, Long flowRunId) {
       this.userId = user;
       this.flowName = flowName;
       this.flowRunId = flowRunId;
     }
+    protected String getUserId() {
+      return userId;
+    }
+    protected String getFlowName() {
+      return flowName;
+    }
+    protected Long getFlowRunId() {
+      return flowRunId;
+    }
   }
 
   @Override
@@ -219,7 +228,7 @@ class GenericEntityReader extends TimelineEntityReader {
         "appId shouldn't be null");
     Preconditions.checkNotNull(getContext().getEntityType(),
         "entityType shouldn't be null");
-    if (singleEntityRead) {
+    if (isSingleEntityRead()) {
       Preconditions.checkNotNull(getContext().getEntityId(),
           "entityId shouldn't be null");
     }
@@ -254,7 +263,7 @@ class GenericEntityReader extends TimelineEntityReader {
     if (filterList != null && !filterList.getFilters().isEmpty()) {
       get.setFilter(filterList);
     }
-    return table.getResult(hbaseConf, conn, get);
+    return getTable().getResult(hbaseConf, conn, get);
   }
 
   @Override
@@ -271,7 +280,7 @@ class GenericEntityReader extends TimelineEntityReader {
     if (filterList != null && !filterList.getFilters().isEmpty()) {
       scan.setFilter(filterList);
     }
-    return table.getResultScanner(hbaseConf, conn, scan);
+    return getTable().getResultScanner(hbaseConf, conn, scan);
   }
 
   @Override
@@ -289,7 +298,7 @@ class GenericEntityReader extends TimelineEntityReader {
     // fetch created time
     Number createdTime = (Number)EntityColumn.CREATED_TIME.readResult(result);
     entity.setCreatedTime(createdTime.longValue());
-    if (!singleEntityRead &&
+    if (!isSingleEntityRead() &&
         (entity.getCreatedTime() < filters.getCreatedTimeBegin() ||
         entity.getCreatedTime() > filters.getCreatedTimeEnd())) {
       return null;
@@ -401,6 +410,14 @@ class GenericEntityReader extends TimelineEntityReader {
 
   /**
    * Helper method for reading relationship.
+   *
+   * @param <T> Describes the type of column prefix.
+   * @param entity entity to fill.
+   * @param result result from HBase.
+   * @param prefix column prefix.
+   * @param isRelatedTo if true, means relationship is to be added to
+   *     isRelatedTo, otherwise its added to relatesTo.
+   * @throws IOException if any problem is encountered while reading result.
    */
   protected <T> void readRelationship(
       TimelineEntity entity, Result result, ColumnPrefix<T> prefix,
@@ -421,6 +438,13 @@ class GenericEntityReader extends TimelineEntityReader {
 
   /**
    * Helper method for reading key-value pairs for either info or config.
+   *
+   * @param <T> Describes the type of column prefix.
+   * @param entity entity to fill.
+   * @param result result from HBase.
+   * @param prefix column prefix.
+   * @param isConfig if true, means we are reading configs, otherwise info.
+   * @throws IOException if any problem is encountered while reading result.
    */
   protected <T> void readKeyValuePairs(
       TimelineEntity entity, Result result, ColumnPrefix<T> prefix,
@@ -441,6 +465,12 @@ class GenericEntityReader extends TimelineEntityReader {
    * is of the form "eventId=timestamp=infoKey" where "infoKey" may be omitted
    * if there is no info associated with the event.
    *
+   * @param entity entity to fill.
+   * @param result HBase Result.
+   * @param isApplication if true, event read is for application table,
+   *     otherwise its being read for entity table.
+   * @throws IOException if any problem is encountered while reading result.
+   *
    * See {@link EntityTable} and {@link ApplicationTable} for a more detailed
    * schema description.
    */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java
index 454c179..281e901 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReader.java
@@ -47,7 +47,7 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix
 public abstract class TimelineEntityReader {
   private static final Log LOG = LogFactory.getLog(TimelineEntityReader.class);
 
-  protected final boolean singleEntityRead;
+  private final boolean singleEntityRead;
   private TimelineReaderContext context;
   private TimelineDataToRetrieve dataToRetrieve;
   // used only for multiple entity read mode
@@ -56,7 +56,7 @@ public abstract class TimelineEntityReader {
   /**
    * Main table the entity reader uses.
    */
-  protected BaseTable<?> table;
+  private BaseTable<?> table;
 
   /**
    * Specifies whether keys for this table are sorted in a manner where entities
@@ -68,6 +68,13 @@ public abstract class TimelineEntityReader {
 
   /**
    * Instantiates a reader for multiple-entity reads.
+   *
+   * @param ctxt Reader context which defines the scope in which query has to be
+   *     made.
+   * @param entityFilters Filters which limit the entities returned.
+   * @param toRetrieve Data to retrieve for each entity.
+   * @param sortedKeys Specifies whether key for this table are sorted or not.
+   *     If sorted, entities can be retrieved by created time.
    */
   protected TimelineEntityReader(TimelineReaderContext ctxt,
       TimelineEntityFilters entityFilters, TimelineDataToRetrieve toRetrieve,
@@ -78,11 +85,15 @@ public abstract class TimelineEntityReader {
     this.dataToRetrieve = toRetrieve;
     this.filters = entityFilters;
 
-    this.table = getTable();
+    this.setTable(getTable());
   }
 
   /**
    * Instantiates a reader for single-entity reads.
+   *
+   * @param ctxt Reader context which defines the scope in which query has to be
+   *     made.
+   * @param toRetrieve Data to retrieve for each entity.
    */
   protected TimelineEntityReader(TimelineReaderContext ctxt,
       TimelineDataToRetrieve toRetrieve) {
@@ -90,13 +101,14 @@ public abstract class TimelineEntityReader {
     this.context = ctxt;
     this.dataToRetrieve = toRetrieve;
 
-    this.table = getTable();
+    this.setTable(getTable());
   }
 
   /**
    * Creates a {@link FilterList} based on fields, confs and metrics to
    * retrieve. This filter list will be set in Scan/Get objects to trim down
    * results fetched from HBase back-end storage.
+   *
    * @return a {@link FilterList} object.
    */
   protected abstract FilterList constructFilterListBasedOnFields();
@@ -115,6 +127,12 @@ public abstract class TimelineEntityReader {
 
   /**
    * Reads and deserializes a single timeline entity from the HBase storage.
+   *
+   * @param hbaseConf HBase Configuration.
+   * @param conn HBase Connection.
+   * @return A <cite>TimelineEntity</cite> object.
+   * @throws IOException if there is any exception encountered while reading
+   *     entity.
    */
   public TimelineEntity readEntity(Configuration hbaseConf, Connection conn)
       throws IOException {
@@ -136,6 +154,11 @@ public abstract class TimelineEntityReader {
    * Reads and deserializes a set of timeline entities from the HBase storage.
    * It goes through all the results available, and returns the number of
    * entries as specified in the limit in the entity's natural sort order.
+   *
+   * @param hbaseConf HBase Configuration.
+   * @param conn HBase Connection.
+   * @return a set of <cite>TimelineEntity</cite> objects.
+   * @throws IOException if any exception is encountered while reading entities.
    */
   public Set<TimelineEntity> readEntities(Configuration hbaseConf,
       Connection conn) throws IOException {
@@ -170,8 +193,12 @@ public abstract class TimelineEntityReader {
 
   /**
    * Returns the main table to be used by the entity reader.
+   *
+   * @return A reference to the table.
    */
-  protected abstract BaseTable<?> getTable();
+  protected BaseTable<?> getTable() {
+    return table;
+  }
 
   /**
    * Validates the required parameters to read the entities.
@@ -180,6 +207,10 @@ public abstract class TimelineEntityReader {
 
   /**
    * Sets certain parameters to defaults if the values are not provided.
+   *
+   * @param hbaseConf HBase Configuration.
+   * @param conn HBase Connection.
+   * @throws IOException if any exception is encountered while setting params.
    */
   protected abstract void augmentParams(Configuration hbaseConf,
       Connection conn) throws IOException;
@@ -187,23 +218,35 @@ public abstract class TimelineEntityReader {
   /**
    * Fetches a {@link Result} instance for a single-entity read.
    *
+   * @param hbaseConf HBase Configuration.
+   * @param conn HBase Connection.
+   * @param filterList filter list which will be applied to HBase Get.
    * @return the {@link Result} instance or null if no such record is found.
+   * @throws IOException if any exception is encountered while getting result.
    */
   protected abstract Result getResult(Configuration hbaseConf, Connection conn,
       FilterList filterList) throws IOException;
 
   /**
    * Fetches a {@link ResultScanner} for a multi-entity read.
+   *
+   * @param hbaseConf HBase Configuration.
+   * @param conn HBase Connection.
+   * @param filterList filter list which will be applied to HBase Scan.
+   * @return the {@link ResultScanner} instance.
+   * @throws IOException if any exception is encountered while getting results.
    */
   protected abstract ResultScanner getResults(Configuration hbaseConf,
       Connection conn, FilterList filterList) throws IOException;
 
   /**
-   * Given a {@link Result} instance, deserializes and creates a
-   * {@link TimelineEntity}.
+   * Parses the result retrieved from the HBase backend and converts it into
+   * a {@link TimelineEntity} object.
    *
-   * @return the {@link TimelineEntity} instance, or null if the {@link Result}
-   * is null or empty.
+   * @param result Single row result of a Get/Scan.
+   * @return the <cite>TimelineEntity</cite> instance or null if the entity is
+   *     filtered.
+   * @throws IOException if any exception is encountered while parsing entity.
    */
   protected abstract TimelineEntity parseEntity(Result result)
       throws IOException;
@@ -212,6 +255,11 @@ public abstract class TimelineEntityReader {
    * Helper method for reading and deserializing {@link TimelineMetric} objects
    * using the specified column prefix. The timeline metrics then are added to
    * the given timeline entity.
+   *
+   * @param entity {@link TimelineEntity} object.
+   * @param result {@link Result} object retrieved from backend.
+   * @param columnPrefix Metric column prefix.
+   * @throws IOException if any exception is encountered while reading metrics.
    */
   protected void readMetrics(TimelineEntity entity, Result result,
       ColumnPrefix<?> columnPrefix) throws IOException {
@@ -229,4 +277,18 @@ public abstract class TimelineEntityReader {
       entity.addMetric(metric);
     }
   }
+
+  /**
+   * Checks whether the reader has been created to fetch a single entity or
+   * multiple entities.
+   *
+   * @return true if the query is for a single entity, false otherwise.
+   */
+  public boolean isSingleEntityRead() {
+    return singleEntityRead;
+  }
+
+  protected void setTable(BaseTable<?> baseTable) {
+    this.table = baseTable;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReaderFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReaderFactory.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReaderFactory.java
index f2bdacd..b2a9476 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReaderFactory.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/TimelineEntityReaderFactory.java
@@ -25,10 +25,19 @@ import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContex
 /**
  * Factory methods for instantiating a timeline entity reader.
  */
-public class TimelineEntityReaderFactory {
+public final class TimelineEntityReaderFactory {
+  private TimelineEntityReaderFactory() {
+  }
+
   /**
    * Creates a timeline entity reader instance for reading a single entity with
    * the specified input.
+   *
+   * @param context Reader context which defines the scope in which the
+   *     query has to be made.
+   * @param dataToRetrieve Data to retrieve for each entity.
+   * @return An implementation of <cite>TimelineEntityReader</cite> chosen
+   *     based on the entity type.
    */
   public static TimelineEntityReader createSingleEntityReader(
       TimelineReaderContext context, TimelineDataToRetrieve dataToRetrieve) {
@@ -51,6 +60,13 @@ public class TimelineEntityReaderFactory {
   /**
    * Creates a timeline entity reader instance for reading set of entities with
    * the specified input and predicates.
+   *
+   * @param context Reader context which defines the scope in which the
+   *     query has to be made.
+   * @param filters Filters which limit the entities returned.
+   * @param dataToRetrieve Data to retrieve for each entity.
+   * @return An implementation of <cite>TimelineEntityReader</cite> chosen
+   *     based on the entity type.
    */
   public static TimelineEntityReader createMultipleEntitiesReader(
       TimelineReaderContext context, TimelineEntityFilters filters,

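As a quick illustration of how these factory methods are driven end to end (a
sketch only, not part of this patch; it assumes the caller already has a
TimelineReaderContext, TimelineEntityFilters and TimelineDataToRetrieve plus
an HBase Configuration and Connection, and import statements are omitted):

  public static TimelineEntity fetchOne(TimelineReaderContext context,
      TimelineDataToRetrieve toRetrieve, Configuration hbaseConf,
      Connection conn) throws IOException {
    TimelineEntityReader reader = TimelineEntityReaderFactory
        .createSingleEntityReader(context, toRetrieve);
    // Single-entity mode: reader.isSingleEntityRead() is true here.
    return reader.readEntity(hbaseConf, conn);
  }

  public static Set<TimelineEntity> fetchMany(TimelineReaderContext context,
      TimelineEntityFilters filters, TimelineDataToRetrieve toRetrieve,
      Configuration hbaseConf, Connection conn) throws IOException {
    TimelineEntityReader reader = TimelineEntityReaderFactory
        .createMultipleEntitiesReader(context, filters, toRetrieve);
    return reader.readEntities(hbaseConf, conn);
  }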
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/package-info.java
index 0b3fa38..9814d6d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/package-info.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/package-info.java
@@ -15,6 +15,11 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
+/**
+ * Package org.apache.hadoop.yarn.server.timelineservice.storage.reader
+ * contains classes used to read entities from backend based on query type.
+ */
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 package org.apache.hadoop.yarn.server.timelineservice.storage.reader;


[4/4] hadoop git commit: YARN-4409. Fix javadoc and checkstyle issues in timelineservice code (Varun Saxena via sjlee)

Posted by sj...@apache.org.
YARN-4409. Fix javadoc and checkstyle issues in timelineservice code (Varun Saxena via sjlee)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/db76a3ad
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/db76a3ad
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/db76a3ad

Branch: refs/heads/YARN-2928
Commit: db76a3ad03682fdafcbec5276eef11ecdbd2719d
Parents: 10a4f8a
Author: Sangjin Lee <sj...@apache.org>
Authored: Mon Feb 8 12:17:43 2016 -0800
Committer: Sangjin Lee <sj...@apache.org>
Committed: Mon Feb 8 12:17:43 2016 -0800

----------------------------------------------------------------------
 .../jobhistory/JobHistoryEventHandler.java      | 170 ++++++++++---------
 .../hadoop/mapreduce/jobhistory/TestEvents.java |   2 +-
 .../mapreduce/util/JobHistoryEventUtils.java    |   7 +-
 .../hadoop/mapred/TimelineEntityConverter.java  |   6 +-
 .../hadoop/mapreduce/JobHistoryFileParser.java  |   3 +
 hadoop-yarn-project/CHANGES.txt                 |   3 +
 .../ApplicationAttemptEntity.java               |   9 +-
 .../timelineservice/ApplicationEntity.java      |   9 +-
 .../records/timelineservice/ClusterEntity.java  |   6 +-
 .../timelineservice/ContainerEntity.java        |   9 +-
 .../records/timelineservice/FlowRunEntity.java  |   9 +-
 .../HierarchicalTimelineEntity.java             |   8 +-
 .../records/timelineservice/QueueEntity.java    |   6 +-
 .../timelineservice/TimelineEntities.java       |  11 +-
 .../records/timelineservice/TimelineEntity.java | 106 ++++++------
 .../timelineservice/TimelineEntityType.java     |  71 +++++---
 .../records/timelineservice/TimelineEvent.java  |  30 ++--
 .../records/timelineservice/TimelineMetric.java |  39 +++--
 .../timelineservice/TimelineWriteResponse.java  |  59 +++----
 .../api/records/timelineservice/UserEntity.java |   6 +-
 .../records/timelineservice/package-info.java   |   8 +-
 .../hadoop/yarn/conf/YarnConfiguration.java     |   9 +-
 .../hadoop/yarn/client/api/TimelineClient.java  |  13 +-
 .../client/api/impl/TimelineClientImpl.java     |  16 +-
 .../yarn/util/timeline/TimelineUtils.java       |  21 ++-
 .../yarn/server/nodemanager/NodeManager.java    |   2 +-
 .../collectormanager/NMCollectorService.java    |   9 +-
 .../collectormanager/package-info.java          |  28 +++
 .../timelineservice/NMTimelineEvent.java        |   4 +
 .../timelineservice/NMTimelineEventType.java    |   3 +
 .../timelineservice/NMTimelinePublisher.java    |  14 +-
 .../timelineservice/package-info.java           |  29 ++++
 .../resourcemanager/RMActiveServiceContext.java |   8 +-
 .../server/resourcemanager/RMContextImpl.java   |   4 +-
 .../metrics/AbstractSystemMetricsPublisher.java |  20 ++-
 .../metrics/NoOpSystemMetricPublisher.java      |   2 +-
 .../metrics/SystemMetricsPublisher.java         |   3 +
 .../metrics/TimelineServiceV1Publisher.java     |   8 +-
 .../metrics/TimelineServiceV2Publisher.java     |   7 +-
 .../resourcemanager/metrics/package-info.java   |  28 +++
 .../rmapp/RMAppCollectorUpdateEvent.java        |   3 +
 .../server/resourcemanager/rmapp/RMAppImpl.java |   5 +-
 .../RMTimelineCollectorManager.java             |  33 ++--
 .../timelineservice/package-info.java           |  28 +++
 .../collector/AppLevelTimelineCollector.java    |   3 +-
 .../collector/NodeTimelineCollectorManager.java |   8 +-
 .../PerNodeTimelineCollectorsAuxService.java    |   2 +
 .../collector/TimelineCollector.java            |   2 +
 .../collector/TimelineCollectorManager.java     |  12 +-
 .../collector/TimelineCollectorWebService.java  |  70 +++++---
 .../timelineservice/collector/package-info.java |  29 ++++
 .../reader/TimelineReaderManager.java           |  32 +++-
 .../reader/TimelineReaderServer.java            |   2 +-
 .../reader/TimelineReaderWebServices.java       |  28 +--
 .../reader/TimelineReaderWebServicesUtils.java  |  50 +++---
 .../reader/TimelineUIDConverter.java            |  10 +-
 .../reader/filter/TimelineFilterUtils.java      |   8 +-
 .../timelineservice/reader/package-info.java    |   6 +
 .../storage/FileSystemTimelineReaderImpl.java   |  70 ++++----
 .../storage/FileSystemTimelineWriterImpl.java   |   7 +-
 .../storage/HBaseTimelineReaderImpl.java        |   3 +
 .../storage/HBaseTimelineWriterImpl.java        |  29 ++--
 .../storage/OfflineAggregationWriter.java       |  13 +-
 .../PhoenixOfflineAggregationWriterImpl.java    |  27 +--
 .../storage/TimelineAggregationTrack.java       |   2 +-
 .../timelineservice/storage/TimelineReader.java |   6 +-
 .../storage/TimelineSchemaCreator.java          |   4 +-
 .../timelineservice/storage/TimelineWriter.java |  15 +-
 .../storage/application/ApplicationColumn.java  |   4 +-
 .../application/ApplicationColumnPrefix.java    |   8 +-
 .../storage/application/ApplicationRowKey.java  |  33 ++--
 .../storage/application/ApplicationTable.java   |  16 +-
 .../storage/application/package-info.java       |   4 +
 .../storage/apptoflow/AppToFlowColumn.java      |   6 +-
 .../apptoflow/AppToFlowColumnFamily.java        |   2 +-
 .../storage/apptoflow/AppToFlowRowKey.java      |   9 +-
 .../storage/apptoflow/AppToFlowTable.java       |   6 +-
 .../storage/apptoflow/package-info.java         |   5 +
 .../storage/common/BaseTable.java               |  21 ++-
 .../common/BufferedMutatorDelegator.java        |   2 +-
 .../timelineservice/storage/common/Column.java  |  16 +-
 .../storage/common/ColumnFamily.java            |   2 +-
 .../storage/common/ColumnHelper.java            |  24 +--
 .../storage/common/ColumnPrefix.java            |  22 ++-
 .../storage/common/LongConverter.java           |   5 +-
 .../storage/common/NumericValueConverter.java   |   7 +-
 .../storage/common/OfflineAggregationInfo.java  |  61 ++++---
 .../timelineservice/storage/common/Range.java   |   3 +
 .../storage/common/Separator.java               |  20 ++-
 .../common/TimelineHBaseSchemaConstants.java    |  33 ++--
 .../storage/common/TimelineStorageUtils.java    |  79 +++++----
 .../storage/common/TimestampGenerator.java      |  12 +-
 .../storage/common/ValueConverter.java          |  10 +-
 .../storage/entity/EntityColumn.java            |   2 +-
 .../storage/entity/EntityColumnPrefix.java      |   9 +-
 .../storage/entity/EntityRowKey.java            |  54 +++---
 .../storage/entity/EntityTable.java             |  16 +-
 .../storage/entity/package-info.java            |   4 +
 .../flow/AggregationCompactionDimension.java    |   6 +-
 .../storage/flow/AggregationOperation.java      |  10 +-
 .../storage/flow/FlowActivityColumnFamily.java  |   3 +-
 .../storage/flow/FlowActivityColumnPrefix.java  |  19 ++-
 .../storage/flow/FlowActivityRowKey.java        |  34 ++--
 .../storage/flow/FlowActivityTable.java         |  19 ++-
 .../storage/flow/FlowRunColumnPrefix.java       |   8 +-
 .../storage/flow/FlowRunCoprocessor.java        |   8 +-
 .../storage/flow/FlowRunRowKey.java             |  21 ++-
 .../storage/flow/FlowRunTable.java              |   8 +-
 .../storage/flow/FlowScanner.java               |  57 ++++---
 .../storage/flow/package-info.java              |  29 ++++
 .../timelineservice/storage/package-info.java   |   4 +
 .../storage/reader/ApplicationEntityReader.java |  28 +--
 .../reader/FlowActivityEntityReader.java        |   2 +-
 .../storage/reader/FlowRunEntityReader.java     |  14 +-
 .../storage/reader/GenericEntityReader.java     |  58 +++++--
 .../storage/reader/TimelineEntityReader.java    |  80 ++++++++-
 .../reader/TimelineEntityReaderFactory.java     |  18 +-
 .../storage/reader/package-info.java            |   5 +
 118 files changed, 1420 insertions(+), 818 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
index 129ff5a..6e5afb1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
@@ -135,7 +135,8 @@ public class JobHistoryEventHandler extends AbstractService
 
   private static String MAPREDUCE_JOB_ENTITY_TYPE = "MAPREDUCE_JOB";
   private static String MAPREDUCE_TASK_ENTITY_TYPE = "MAPREDUCE_TASK";
-  private static String MAPREDUCE_TASK_ATTEMPT_ENTITY_TYPE = "MAPREDUCE_TASK_ATTEMPT";
+  private static final String MAPREDUCE_TASK_ATTEMPT_ENTITY_TYPE =
+      "MAPREDUCE_TASK_ATTEMPT";
 
   public JobHistoryEventHandler(AppContext context, int startCount) {
     super("JobHistoryEventHandler");
@@ -467,8 +468,9 @@ public class JobHistoryEventHandler extends AbstractService
     try {
       if (!threadPool.awaitTermination(60, TimeUnit.SECONDS)) {
         threadPool.shutdownNow(); 
-        if (!threadPool.awaitTermination(60, TimeUnit.SECONDS))
+        if (!threadPool.awaitTermination(60, TimeUnit.SECONDS)) {
           LOG.error("ThreadPool did not terminate");
+        }
       }
     } catch (InterruptedException ie) {
       threadPool.shutdownNow();
@@ -1061,7 +1063,7 @@ public class JobHistoryEventHandler extends AbstractService
       default:
         break;
     }
-    
+
     try {
       timelineClient.putEntities(tEntity);
     } catch (IOException|YarnException ex) {
@@ -1070,12 +1072,12 @@ public class JobHistoryEventHandler extends AbstractService
     }
   }
   
-  private void putEntityWithoutBlocking(final TimelineClient timelineClient, 
+  private void putEntityWithoutBlocking(final TimelineClient client,
       final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity) {
     Runnable publishWrapper = new Runnable() {
       public void run() {
         try {
-          timelineClient.putEntities(entity);
+          client.putEntities(entity);
         } catch (IOException|YarnException e) {
           LOG.error("putEntityNonBlocking get failed: " + e);
           throw new RuntimeException(e.toString());
@@ -1141,87 +1143,92 @@ public class JobHistoryEventHandler extends AbstractService
     entity.addIsRelatedToEntity(relatedTaskEntity, taskId);
     return entity;
   }
-  
-  private void processEventForNewTimelineService(HistoryEvent event, JobId jobId,
-      long timestamp) {
-    org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity tEntity = null;
+
+  private void processEventForNewTimelineService(HistoryEvent event,
+      JobId jobId, long timestamp) {
+    org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity tEntity =
+        null;
     String taskId = null;
     String taskAttemptId = null;
     boolean setCreatedTime = false;
 
     switch (event.getEventType()) {
-      // Handle job events
-      case JOB_SUBMITTED:
-        setCreatedTime = true;
-        break;
-      case JOB_STATUS_CHANGED:
-      case JOB_INFO_CHANGED:
-      case JOB_INITED:
-      case JOB_PRIORITY_CHANGED:
-      case JOB_QUEUE_CHANGED:
-      case JOB_FAILED:
-      case JOB_KILLED:
-      case JOB_ERROR:
-      case JOB_FINISHED:
-      case AM_STARTED:
-      case NORMALIZED_RESOURCE:
-        break;
-      // Handle task events
-      case TASK_STARTED:
-        setCreatedTime = true;
-        taskId = ((TaskStartedEvent)event).getTaskId().toString();
-        break;
-      case TASK_FAILED:
-        taskId = ((TaskFailedEvent)event).getTaskId().toString();
-        break;
-      case TASK_UPDATED:
-        taskId = ((TaskUpdatedEvent)event).getTaskId().toString();
-        break;
-      case TASK_FINISHED:
-        taskId = ((TaskFinishedEvent)event).getTaskId().toString();
-        break;
-      case MAP_ATTEMPT_STARTED:
-      case REDUCE_ATTEMPT_STARTED:
-        setCreatedTime = true;
-        taskId = ((TaskAttemptStartedEvent)event).getTaskId().toString();
-        taskAttemptId = ((TaskAttemptStartedEvent)event).
-            getTaskAttemptId().toString();
-        break;
-      case CLEANUP_ATTEMPT_STARTED:
-      case SETUP_ATTEMPT_STARTED:
-        taskId = ((TaskAttemptStartedEvent)event).getTaskId().toString();
-        taskAttemptId = ((TaskAttemptStartedEvent)event).
-            getTaskAttemptId().toString();
-        break;
-      case MAP_ATTEMPT_FAILED:
-      case CLEANUP_ATTEMPT_FAILED:
-      case REDUCE_ATTEMPT_FAILED:
-      case SETUP_ATTEMPT_FAILED:
-      case MAP_ATTEMPT_KILLED:
-      case CLEANUP_ATTEMPT_KILLED:
-      case REDUCE_ATTEMPT_KILLED:
-      case SETUP_ATTEMPT_KILLED:
-        taskId = ((TaskAttemptUnsuccessfulCompletionEvent)event).getTaskId().toString();
-        taskAttemptId = ((TaskAttemptUnsuccessfulCompletionEvent)event).
-            getTaskAttemptId().toString();
-        break;
-      case MAP_ATTEMPT_FINISHED:
-        taskId = ((MapAttemptFinishedEvent)event).getTaskId().toString();
-        taskAttemptId = ((MapAttemptFinishedEvent)event).getAttemptId().toString();
-        break;
-      case REDUCE_ATTEMPT_FINISHED:
-        taskId = ((ReduceAttemptFinishedEvent)event).getTaskId().toString();
-        taskAttemptId = ((ReduceAttemptFinishedEvent)event).getAttemptId().toString();
-        break;
-      case SETUP_ATTEMPT_FINISHED:
-      case CLEANUP_ATTEMPT_FINISHED:
-        taskId = ((TaskAttemptFinishedEvent)event).getTaskId().toString();
-        taskAttemptId = ((TaskAttemptFinishedEvent)event).getAttemptId().toString();
-        break;
-      default:
-        LOG.warn("EventType: " + event.getEventType() + " cannot be recognized" +
-            " and handled by timeline service.");
-        return;
+    // Handle job events
+    case JOB_SUBMITTED:
+      setCreatedTime = true;
+      break;
+    case JOB_STATUS_CHANGED:
+    case JOB_INFO_CHANGED:
+    case JOB_INITED:
+    case JOB_PRIORITY_CHANGED:
+    case JOB_QUEUE_CHANGED:
+    case JOB_FAILED:
+    case JOB_KILLED:
+    case JOB_ERROR:
+    case JOB_FINISHED:
+    case AM_STARTED:
+    case NORMALIZED_RESOURCE:
+      break;
+    // Handle task events
+    case TASK_STARTED:
+      setCreatedTime = true;
+      taskId = ((TaskStartedEvent)event).getTaskId().toString();
+      break;
+    case TASK_FAILED:
+      taskId = ((TaskFailedEvent)event).getTaskId().toString();
+      break;
+    case TASK_UPDATED:
+      taskId = ((TaskUpdatedEvent)event).getTaskId().toString();
+      break;
+    case TASK_FINISHED:
+      taskId = ((TaskFinishedEvent)event).getTaskId().toString();
+      break;
+    case MAP_ATTEMPT_STARTED:
+    case REDUCE_ATTEMPT_STARTED:
+      setCreatedTime = true;
+      taskId = ((TaskAttemptStartedEvent)event).getTaskId().toString();
+      taskAttemptId = ((TaskAttemptStartedEvent)event).
+          getTaskAttemptId().toString();
+      break;
+    case CLEANUP_ATTEMPT_STARTED:
+    case SETUP_ATTEMPT_STARTED:
+      taskId = ((TaskAttemptStartedEvent)event).getTaskId().toString();
+      taskAttemptId = ((TaskAttemptStartedEvent)event).
+          getTaskAttemptId().toString();
+      break;
+    case MAP_ATTEMPT_FAILED:
+    case CLEANUP_ATTEMPT_FAILED:
+    case REDUCE_ATTEMPT_FAILED:
+    case SETUP_ATTEMPT_FAILED:
+    case MAP_ATTEMPT_KILLED:
+    case CLEANUP_ATTEMPT_KILLED:
+    case REDUCE_ATTEMPT_KILLED:
+    case SETUP_ATTEMPT_KILLED:
+      taskId = ((TaskAttemptUnsuccessfulCompletionEvent)event).
+          getTaskId().toString();
+      taskAttemptId = ((TaskAttemptUnsuccessfulCompletionEvent)event).
+          getTaskAttemptId().toString();
+      break;
+    case MAP_ATTEMPT_FINISHED:
+      taskId = ((MapAttemptFinishedEvent)event).getTaskId().toString();
+      taskAttemptId = ((MapAttemptFinishedEvent)event).
+          getAttemptId().toString();
+      break;
+    case REDUCE_ATTEMPT_FINISHED:
+      taskId = ((ReduceAttemptFinishedEvent)event).getTaskId().toString();
+      taskAttemptId = ((ReduceAttemptFinishedEvent)event).
+          getAttemptId().toString();
+      break;
+    case SETUP_ATTEMPT_FINISHED:
+    case CLEANUP_ATTEMPT_FINISHED:
+      taskId = ((TaskAttemptFinishedEvent)event).getTaskId().toString();
+      taskAttemptId = ((TaskAttemptFinishedEvent)event).
+          getAttemptId().toString();
+      break;
+    default:
+      LOG.warn("EventType: " + event.getEventType() + " cannot be recognized" +
+          " and handled by timeline service.");
+      return;
     }
     if (taskId == null) {
       // JobEntity
@@ -1240,7 +1247,6 @@ public class JobHistoryEventHandler extends AbstractService
             taskId, setCreatedTime);
       }
     }
-
     putEntityWithoutBlocking(timelineClient, tEntity);
   }
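The putEntityWithoutBlocking() change above keeps the fire-and-forget publish
pattern; distilled into a sketch (illustrative only, not part of this patch:
the executor and logger are hypothetical stand-ins for the handler's own
threadPool and LOG, and the entity is the already converted timelineservice
entity):

  private final ExecutorService executor = Executors.newSingleThreadExecutor();

  private void publishAsync(final TimelineClient client,
      final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
          entity) {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        try {
          // The write happens off the event-handling thread, so a slow or
          // unreachable collector never blocks job history processing.
          client.putEntities(entity);
        } catch (IOException | YarnException e) {
          LOG.error("Async putEntities failed", e);
        }
      }
    });
  }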
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestEvents.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestEvents.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestEvents.java
index e7d5006..6eb8a43 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestEvents.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestEvents.java
@@ -405,7 +405,7 @@ public class TestEvents {
     public void setDatum(Object datum) {
       this.datum = datum;
     }
-    
+
     @Override
     public TimelineEvent toTimelineEvent() {
       return null;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/JobHistoryEventUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/JobHistoryEventUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/JobHistoryEventUtils.java
index f4896ff..e60380b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/JobHistoryEventUtils.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/JobHistoryEventUtils.java
@@ -26,7 +26,12 @@ import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.node.ArrayNode;
 import org.codehaus.jackson.node.ObjectNode;
 
-public class JobHistoryEventUtils {
+/**
+ * Class containing utility methods to be used by JobHistoryEventHandler.
+ */
+public final class JobHistoryEventUtils {
+  private JobHistoryEventUtils() {
+  }
 
   public static JsonNode countersToJSON(Counters counters) {
     ObjectMapper mapper = new ObjectMapper();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TimelineEntityConverter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TimelineEntityConverter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TimelineEntityConverter.java
index 80928dc..880014b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TimelineEntityConverter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TimelineEntityConverter.java
@@ -125,7 +125,8 @@ class TimelineEntityConverter {
     }
   }
 
-  private Set<TimelineEntity> createTaskAndTaskAttemptEntities(JobInfo jobInfo) {
+  private Set<TimelineEntity> createTaskAndTaskAttemptEntities(
+      JobInfo jobInfo) {
     Set<TimelineEntity> entities = new HashSet<>();
     Map<TaskID,TaskInfo> taskInfoMap = jobInfo.getAllTasks();
     LOG.info("job " + jobInfo.getJobId()+ " has " + taskInfoMap.size() +
@@ -175,7 +176,8 @@ class TimelineEntityConverter {
     return taskAttempts;
   }
 
-  private TimelineEntity createTaskAttemptEntity(TaskAttemptInfo taskAttemptInfo) {
+  private TimelineEntity createTaskAttemptEntity(
+      TaskAttemptInfo taskAttemptInfo) {
     TimelineEntity taskAttempt = new TimelineEntity();
     taskAttempt.setType(TASK_ATTEMPT);
     taskAttempt.setId(taskAttemptInfo.getAttemptId().toString());

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileParser.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileParser.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileParser.java
index c290cd6..5d9dc0b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileParser.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileParser.java
@@ -28,6 +28,9 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
 
+/**
+ * Used to parse job history and configuration files.
+ */
 class JobHistoryFileParser {
   private static final Log LOG = LogFactory.getLog(JobHistoryFileParser.class);
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index ae5d907..f6bf667 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -229,6 +229,9 @@ Branch YARN-2928: Timeline Server Next Generation: Phase 1
     YARN-4644. TestRMRestart fails and findbugs issue in YARN-2928 branch.
     (Varun Saxena via Naganarasimha G R)
 
+    YARN-4409. Fix javadoc and checkstyle issues in timelineservice code (Varun
+    Saxena via sjlee)
+
 Trunk - Unreleased
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/ApplicationAttemptEntity.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/ApplicationAttemptEntity.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/ApplicationAttemptEntity.java
index 734c741..053d84e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/ApplicationAttemptEntity.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/ApplicationAttemptEntity.java
@@ -20,6 +20,9 @@ package org.apache.hadoop.yarn.api.records.timelineservice;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
+/**
+ * This entity represents an application attempt.
+ */
 @InterfaceAudience.Public
 @InterfaceStability.Unstable
 public class ApplicationAttemptEntity extends HierarchicalTimelineEntity {
@@ -29,8 +32,10 @@ public class ApplicationAttemptEntity extends HierarchicalTimelineEntity {
 
   public ApplicationAttemptEntity(TimelineEntity entity) {
     super(entity);
-    if (!entity.getType().equals(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString())) {
-      throw new IllegalArgumentException("Incompatible entity type: " + getId());
+    if (!entity.getType().equals(
+        TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString())) {
+      throw new IllegalArgumentException("Incompatible entity type: "
+          + getId());
     }
   }
 }
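To make the type guard in this constructor concrete, a small sketch (not part
of this patch; it assumes the no-argument TimelineEntity constructor, uses the
two-argument Identifier constructor that appears elsewhere in this commit, and
the attempt id is made up):

  // Assumed no-arg constructor; the identifier carries type + id.
  TimelineEntity generic = new TimelineEntity();
  generic.setIdentifier(new TimelineEntity.Identifier(
      TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString(),
      "appattempt_1454962021886_0001_000001"));

  // Succeeds: the wrapped entity's type matches YARN_APPLICATION_ATTEMPT.
  ApplicationAttemptEntity attempt = new ApplicationAttemptEntity(generic);

  // Wrapping an entity of any other type throws
  // IllegalArgumentException("Incompatible entity type: ...").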

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/ApplicationEntity.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/ApplicationEntity.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/ApplicationEntity.java
index 183d8d8..6075ec4 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/ApplicationEntity.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/ApplicationEntity.java
@@ -20,6 +20,9 @@ package org.apache.hadoop.yarn.api.records.timelineservice;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
+/**
+ * This entity represents an application.
+ */
 @InterfaceAudience.Public
 @InterfaceStability.Unstable
 public class ApplicationEntity extends HierarchicalTimelineEntity {
@@ -32,8 +35,10 @@ public class ApplicationEntity extends HierarchicalTimelineEntity {
 
   public ApplicationEntity(TimelineEntity entity) {
     super(entity);
-    if (!entity.getType().equals(TimelineEntityType.YARN_APPLICATION.toString())) {
-      throw new IllegalArgumentException("Incompatible entity type: " + getId());
+    if (!entity.getType().equals(
+        TimelineEntityType.YARN_APPLICATION.toString())) {
+      throw new IllegalArgumentException("Incompatible entity type: "
+          + getId());
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/ClusterEntity.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/ClusterEntity.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/ClusterEntity.java
index 94eefa8..1f96505 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/ClusterEntity.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/ClusterEntity.java
@@ -20,6 +20,9 @@ package org.apache.hadoop.yarn.api.records.timelineservice;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
+/**
+ * This entity represents a YARN cluster.
+ */
 @InterfaceAudience.Public
 @InterfaceStability.Unstable
 public class ClusterEntity extends HierarchicalTimelineEntity {
@@ -30,7 +33,8 @@ public class ClusterEntity extends HierarchicalTimelineEntity {
   public ClusterEntity(TimelineEntity entity) {
     super(entity);
     if (!entity.getType().equals(TimelineEntityType.YARN_CLUSTER.toString())) {
-      throw new IllegalArgumentException("Incompatible entity type: " + getId());
+      throw new IllegalArgumentException("Incompatible entity type: "
+          + getId());
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/ContainerEntity.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/ContainerEntity.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/ContainerEntity.java
index c7d6bce..f61920f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/ContainerEntity.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/ContainerEntity.java
@@ -20,6 +20,9 @@ package org.apache.hadoop.yarn.api.records.timelineservice;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
+/**
+ * This entity represents a container belonging to an application attempt.
+ */
 @InterfaceAudience.Public
 @InterfaceStability.Unstable
 public class ContainerEntity extends HierarchicalTimelineEntity {
@@ -29,8 +32,10 @@ public class ContainerEntity extends HierarchicalTimelineEntity {
 
   public ContainerEntity(TimelineEntity entity) {
     super(entity);
-    if (!entity.getType().equals(TimelineEntityType.YARN_CONTAINER.toString())) {
-      throw new IllegalArgumentException("Incompatible entity type: " + getId());
+    if (!entity.getType().equals(
+        TimelineEntityType.YARN_CONTAINER.toString())) {
+      throw new IllegalArgumentException("Incompatible entity type: "
+          + getId());
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/FlowRunEntity.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/FlowRunEntity.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/FlowRunEntity.java
index 3c3ffb4..410a1bf 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/FlowRunEntity.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/FlowRunEntity.java
@@ -22,6 +22,9 @@ import javax.xml.bind.annotation.XmlElement;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
+/**
+ * This entity represents a flow run.
+ */
 @InterfaceAudience.Public
 @InterfaceStability.Unstable
 public class FlowRunEntity extends HierarchicalTimelineEntity {
@@ -44,8 +47,10 @@ public class FlowRunEntity extends HierarchicalTimelineEntity {
 
   public FlowRunEntity(TimelineEntity entity) {
     super(entity);
-    if (!entity.getType().equals(TimelineEntityType.YARN_FLOW_RUN.toString())) {
-      throw new IllegalArgumentException("Incompatible entity type: " + getId());
+    if (!entity.getType().equals(
+        TimelineEntityType.YARN_FLOW_RUN.toString())) {
+      throw new IllegalArgumentException("Incompatible entity type: "
+          + getId());
     }
     // set config to null
     setConfigs(null);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/HierarchicalTimelineEntity.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/HierarchicalTimelineEntity.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/HierarchicalTimelineEntity.java
index 6235da0..4744e39 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/HierarchicalTimelineEntity.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/HierarchicalTimelineEntity.java
@@ -17,17 +17,18 @@
  */
 package org.apache.hadoop.yarn.api.records.timelineservice;
 
-import com.google.common.base.Joiner;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 
-import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashSet;
-import java.util.List;
 import java.util.Set;
 
+/**
+ * This class extends timeline entity and defines parent-child relationships
+ * with other entities.
+ */
 @InterfaceAudience.Public
 @InterfaceStability.Unstable
 public abstract class HierarchicalTimelineEntity extends TimelineEntity {
@@ -66,6 +67,7 @@ public abstract class HierarchicalTimelineEntity extends TimelineEntity {
     setParent(new Identifier(type, id));
   }
 
+  @SuppressWarnings("unchecked")
   public Set<Identifier> getChildren() {
     Object identifiers = getInfo().get(CHILDREN_INFO_KEY);
     if (identifiers == null) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/QueueEntity.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/QueueEntity.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/QueueEntity.java
index d1a1f19..b654450 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/QueueEntity.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/QueueEntity.java
@@ -20,6 +20,9 @@ package org.apache.hadoop.yarn.api.records.timelineservice;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
+/**
+ * This entity represents a queue.
+ */
 @InterfaceAudience.Public
 @InterfaceStability.Unstable
 public class QueueEntity extends HierarchicalTimelineEntity {
@@ -30,7 +33,8 @@ public class QueueEntity extends HierarchicalTimelineEntity {
   public QueueEntity(TimelineEntity entity) {
     super(entity);
     if (!entity.getType().equals(TimelineEntityType.YARN_QUEUE.toString())) {
-      throw new IllegalArgumentException("Incompatible entity type: " + getId());
+      throw new IllegalArgumentException("Incompatible entity type: "
+          + getId());
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntities.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntities.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntities.java
index 39504cc..f08a0ec 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntities.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntities.java
@@ -27,6 +27,9 @@ import javax.xml.bind.annotation.XmlRootElement;
 import java.util.HashSet;
 import java.util.Set;
 
+/**
+ * This class hosts a set of timeline entities.
+ */
 @XmlRootElement(name = "entities")
 @XmlAccessorType(XmlAccessType.NONE)
 @InterfaceAudience.Public
@@ -44,12 +47,12 @@ public class TimelineEntities {
     return entities;
   }
 
-  public void setEntities(Set<TimelineEntity> entities) {
-    this.entities = entities;
+  public void setEntities(Set<TimelineEntity> timelineEntities) {
+    this.entities = timelineEntities;
   }
 
-  public void addEntities(Set<TimelineEntity> entities) {
-    this.entities.addAll(entities);
+  public void addEntities(Set<TimelineEntity> timelineEntities) {
+    this.entities.addAll(timelineEntities);
   }
 
   public void addEntity(TimelineEntity entity) {

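A brief sketch of the renamed setters and adders in use (illustrative only,
not part of this patch; appEntity and anotherEntity are hypothetical
TimelineEntity instances built elsewhere):

  Set<TimelineEntity> batch = new HashSet<>();
  batch.add(appEntity);               // appEntity: hypothetical entity

  TimelineEntities payload = new TimelineEntities();
  payload.addEntity(anotherEntity);   // append a single entity
  payload.addEntities(batch);         // append a whole set at once
  payload.setEntities(batch);         // or replace the backing set entirely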
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
index a661f7a..acc132e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
@@ -54,6 +54,9 @@ import java.util.TreeSet;
 public class TimelineEntity implements Comparable<TimelineEntity> {
   protected final static String SYSTEM_INFO_KEY_PREFIX = "SYSTEM_INFO_";
 
+  /**
+   * Identifier of a timeline entity (entity id + entity type).
+   */
   @XmlRootElement(name = "identifier")
   @XmlAccessorType(XmlAccessType.NONE)
   public static class Identifier {
@@ -74,8 +77,8 @@ public class TimelineEntity implements Comparable<TimelineEntity> {
       return type;
     }
 
-    public void setType(String type) {
-      this.type = type;
+    public void setType(String entityType) {
+      this.type = entityType;
     }
 
     @XmlElement(name = "id")
@@ -83,8 +86,8 @@ public class TimelineEntity implements Comparable<TimelineEntity> {
       return id;
     }
 
-    public void setId(String id) {
-      this.id = id;
+    public void setId(String entityId) {
+      this.id = entityId;
     }
 
     @Override
@@ -106,8 +109,9 @@ public class TimelineEntity implements Comparable<TimelineEntity> {
 
     @Override
     public boolean equals(Object obj) {
-      if (this == obj)
+      if (this == obj) {
         return true;
+      }
       if (!(obj instanceof Identifier)) {
         return false;
       }
@@ -208,11 +212,11 @@ public class TimelineEntity implements Comparable<TimelineEntity> {
     }
   }
 
-  public void setIdentifier(Identifier identifier) {
+  public void setIdentifier(Identifier entityIdentifier) {
     if (real == null) {
-      this.identifier = identifier;
+      this.identifier = entityIdentifier;
     } else {
-      real.setIdentifier(identifier);
+      real.setIdentifier(entityIdentifier);
     }
   }
 
@@ -235,19 +239,19 @@ public class TimelineEntity implements Comparable<TimelineEntity> {
     }
   }
 
-  public void setInfo(Map<String, Object> info) {
+  public void setInfo(Map<String, Object> entityInfos) {
     if (real == null) {
-      this.info = TimelineServiceHelper.mapCastToHashMap(info);
+      this.info = TimelineServiceHelper.mapCastToHashMap(entityInfos);
     } else {
-      real.setInfo(info);
+      real.setInfo(entityInfos);
     }
   }
 
-  public void addInfo(Map<String, Object> info) {
+  public void addInfo(Map<String, Object> entityInfos) {
     if (real == null) {
-      this.info.putAll(info);
+      this.info.putAll(entityInfos);
     } else {
-      real.addInfo(info);
+      real.addInfo(entityInfos);
     }
   }
 
@@ -278,19 +282,19 @@ public class TimelineEntity implements Comparable<TimelineEntity> {
     }
   }
 
-  public void setConfigs(Map<String, String> configs) {
+  public void setConfigs(Map<String, String> entityConfigs) {
     if (real == null) {
-      this.configs = TimelineServiceHelper.mapCastToHashMap(configs);
+      this.configs = TimelineServiceHelper.mapCastToHashMap(entityConfigs);
     } else {
-      real.setConfigs(configs);
+      real.setConfigs(entityConfigs);
     }
   }
 
-  public void addConfigs(Map<String, String> configs) {
+  public void addConfigs(Map<String, String> entityConfigs) {
     if (real == null) {
-      this.configs.putAll(configs);
+      this.configs.putAll(entityConfigs);
     } else {
-      real.addConfigs(configs);
+      real.addConfigs(entityConfigs);
     }
   }
 
@@ -311,19 +315,19 @@ public class TimelineEntity implements Comparable<TimelineEntity> {
     }
   }
 
-  public void setMetrics(Set<TimelineMetric> metrics) {
+  public void setMetrics(Set<TimelineMetric> entityMetrics) {
     if (real == null) {
-      this.metrics = metrics;
+      this.metrics = entityMetrics;
     } else {
-      real.setMetrics(metrics);
+      real.setMetrics(entityMetrics);
     }
   }
 
-  public void addMetrics(Set<TimelineMetric> metrics) {
+  public void addMetrics(Set<TimelineMetric> entityMetrics) {
     if (real == null) {
-      this.metrics.addAll(metrics);
+      this.metrics.addAll(entityMetrics);
     } else {
-      real.addMetrics(metrics);
+      real.addMetrics(entityMetrics);
     }
   }
 
@@ -344,19 +348,19 @@ public class TimelineEntity implements Comparable<TimelineEntity> {
     }
   }
 
-  public void setEvents(NavigableSet<TimelineEvent> events) {
+  public void setEvents(NavigableSet<TimelineEvent> entityEvents) {
     if (real == null) {
-      this.events = events;
+      this.events = entityEvents;
     } else {
-      real.setEvents(events);
+      real.setEvents(entityEvents);
     }
   }
 
-  public void addEvents(Set<TimelineEvent> events) {
+  public void addEvents(Set<TimelineEvent> entityEvents) {
     if (real == null) {
-      this.events.addAll(events);
+      this.events.addAll(entityEvents);
     } else {
-      real.addEvents(events);
+      real.addEvents(entityEvents);
     }
   }
 
@@ -389,20 +393,19 @@ public class TimelineEntity implements Comparable<TimelineEntity> {
 
   @JsonSetter("isrelatedto")
   public void setIsRelatedToEntities(
-      Map<String, Set<String>> isRelatedToEntities) {
+      Map<String, Set<String>> isRelatedTo) {
     if (real == null) {
       this.isRelatedToEntities =
-          TimelineServiceHelper.mapCastToHashMap(isRelatedToEntities);
+          TimelineServiceHelper.mapCastToHashMap(isRelatedTo);
     } else {
-      real.setIsRelatedToEntities(isRelatedToEntities);
+      real.setIsRelatedToEntities(isRelatedTo);
     }
   }
 
   public void addIsRelatedToEntities(
-      Map<String, Set<String>> isRelatedToEntities) {
+      Map<String, Set<String>> isRelatedTo) {
     if (real == null) {
-      for (Map.Entry<String, Set<String>> entry : isRelatedToEntities
-          .entrySet()) {
+      for (Map.Entry<String, Set<String>> entry : isRelatedTo.entrySet()) {
         Set<String> ids = this.isRelatedToEntities.get(entry.getKey());
         if (ids == null) {
           ids = new HashSet<>();
@@ -411,7 +414,7 @@ public class TimelineEntity implements Comparable<TimelineEntity> {
         ids.addAll(entry.getValue());
       }
     } else {
-      real.addIsRelatedToEntities(isRelatedToEntities);
+      real.addIsRelatedToEntities(isRelatedTo);
     }
   }
 
@@ -447,10 +450,9 @@ public class TimelineEntity implements Comparable<TimelineEntity> {
     }
   }
 
-  public void addRelatesToEntities(Map<String, Set<String>> relatesToEntities) {
+  public void addRelatesToEntities(Map<String, Set<String>> relatesTo) {
     if (real == null) {
-      for (Map.Entry<String, Set<String>> entry : relatesToEntities
-          .entrySet()) {
+      for (Map.Entry<String, Set<String>> entry : relatesTo.entrySet()) {
         Set<String> ids = this.relatesToEntities.get(entry.getKey());
         if (ids == null) {
           ids = new HashSet<>();
@@ -459,7 +461,7 @@ public class TimelineEntity implements Comparable<TimelineEntity> {
         ids.addAll(entry.getValue());
       }
     } else {
-      real.addRelatesToEntities(relatesToEntities);
+      real.addRelatesToEntities(relatesTo);
     }
   }
 
@@ -477,12 +479,12 @@ public class TimelineEntity implements Comparable<TimelineEntity> {
   }
 
   @JsonSetter("relatesto")
-  public void setRelatesToEntities(Map<String, Set<String>> relatesToEntities) {
+  public void setRelatesToEntities(Map<String, Set<String>> relatesTo) {
     if (real == null) {
       this.relatesToEntities =
-          TimelineServiceHelper.mapCastToHashMap(relatesToEntities);
+          TimelineServiceHelper.mapCastToHashMap(relatesTo);
     } else {
-      real.setRelatesToEntities(relatesToEntities);
+      real.setRelatesToEntities(relatesTo);
     }
   }
 
@@ -496,11 +498,11 @@ public class TimelineEntity implements Comparable<TimelineEntity> {
   }
 
   @JsonSetter("createdtime")
-  public void setCreatedTime(long createdTime) {
+  public void setCreatedTime(long createdTs) {
     if (real == null) {
-      this.createdTime = createdTime;
+      this.createdTime = createdTs;
     } else {
-      real.setCreatedTime(createdTime);
+      real.setCreatedTime(createdTs);
     }
   }
 
@@ -530,10 +532,12 @@ public class TimelineEntity implements Comparable<TimelineEntity> {
 
   @Override
   public boolean equals(Object obj) {
-    if (this == obj)
+    if (this == obj) {
       return true;
-    if (!(obj instanceof TimelineEntity))
+    }
+    if (!(obj instanceof TimelineEntity)) {
       return false;
+    }
     TimelineEntity other = (TimelineEntity) obj;
     return getIdentifier().equals(other.getIdentifier());
   }
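Equality here is driven entirely by the identifier; a brief sketch (not part
of this patch; the ids are made up and the no-argument constructor is
assumed):

  TimelineEntity a = new TimelineEntity();
  a.setIdentifier(new TimelineEntity.Identifier(
      TimelineEntityType.YARN_APPLICATION.toString(), "application_1_0001"));
  a.addInfo(Collections.singletonMap("attempts", (Object) 2));

  TimelineEntity b = new TimelineEntity();
  b.setIdentifier(new TimelineEntity.Identifier(
      TimelineEntityType.YARN_APPLICATION.toString(), "application_1_0001"));

  // Same type + id => equal, even though only 'a' carries extra info.
  assert a.equals(b);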

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntityType.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntityType.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntityType.java
index ba32e20..8fcc2ae 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntityType.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntityType.java
@@ -20,6 +20,9 @@ package org.apache.hadoop.yarn.api.records.timelineservice;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
+/**
+ * Defines type of entity.
+ */
 @InterfaceAudience.Public
 @InterfaceStability.Unstable
 public enum TimelineEntityType {
@@ -34,51 +37,63 @@ public enum TimelineEntityType {
 
   /**
    * Whether the input type can be a parent of this entity.
+   *
+   * @param type entity type.
+   * @return true, if this entity type is a parent of the passed entity type,
+   *     false otherwise.
    */
   public boolean isParent(TimelineEntityType type) {
     switch (this) {
-      case YARN_CLUSTER:
-        return false;
-      case YARN_FLOW_RUN:
-        return YARN_FLOW_RUN == type || YARN_CLUSTER == type;
-      case YARN_APPLICATION:
-        return YARN_FLOW_RUN == type || YARN_CLUSTER == type;
-      case YARN_APPLICATION_ATTEMPT:
-        return YARN_APPLICATION == type;
-      case YARN_CONTAINER:
-        return YARN_APPLICATION_ATTEMPT == type;
-      case YARN_QUEUE:
-        return YARN_QUEUE == type;
-      default:
-        return false;
+    case YARN_CLUSTER:
+      return false;
+    case YARN_FLOW_RUN:
+      return YARN_FLOW_RUN == type || YARN_CLUSTER == type;
+    case YARN_APPLICATION:
+      return YARN_FLOW_RUN == type || YARN_CLUSTER == type;
+    case YARN_APPLICATION_ATTEMPT:
+      return YARN_APPLICATION == type;
+    case YARN_CONTAINER:
+      return YARN_APPLICATION_ATTEMPT == type;
+    case YARN_QUEUE:
+      return YARN_QUEUE == type;
+    default:
+      return false;
     }
   }
 
   /**
    * Whether the input type can be a child of this entity.
+   *
+   * @param type entity type.
+   * @return true, if this entity type is a child of the passed entity type,
+   *     false otherwise.
    */
   public boolean isChild(TimelineEntityType type) {
     switch (this) {
-      case YARN_CLUSTER:
-        return YARN_FLOW_RUN == type || YARN_APPLICATION == type;
-      case YARN_FLOW_RUN:
-        return YARN_FLOW_RUN == type || YARN_APPLICATION == type;
-      case YARN_APPLICATION:
-        return YARN_APPLICATION_ATTEMPT == type;
-      case YARN_APPLICATION_ATTEMPT:
-        return YARN_CONTAINER == type;
-      case YARN_CONTAINER:
-        return false;
-      case YARN_QUEUE:
-        return YARN_QUEUE == type;
-      default:
-        return false;
+    case YARN_CLUSTER:
+      return YARN_FLOW_RUN == type || YARN_APPLICATION == type;
+    case YARN_FLOW_RUN:
+      return YARN_FLOW_RUN == type || YARN_APPLICATION == type;
+    case YARN_APPLICATION:
+      return YARN_APPLICATION_ATTEMPT == type;
+    case YARN_APPLICATION_ATTEMPT:
+      return YARN_CONTAINER == type;
+    case YARN_CONTAINER:
+      return false;
+    case YARN_QUEUE:
+      return YARN_QUEUE == type;
+    default:
+      return false;
     }
   }
 
   /**
    * Whether the type of this entity matches the type indicated by the input
    * argument.
+   *
+   * @param typeString entity type represented as a string.
+   * @return true, if the string representation of this entity type matches
+   *     the passed entity type.
    */
   public boolean matches(String typeString) {
     return toString().equals(typeString);

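For reference, a small sketch of how the isParent/isChild/matches contracts documented above play out; it is not part of the patch and relies only on the enum values and methods visible in this diff:

import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;

public class EntityTypeCheckSketch {
  public static void main(String[] args) {
    // An application attempt's parent is an application...
    System.out.println(TimelineEntityType.YARN_APPLICATION_ATTEMPT
        .isParent(TimelineEntityType.YARN_APPLICATION));     // true
    // ...and its only valid child type is a container.
    System.out.println(TimelineEntityType.YARN_APPLICATION_ATTEMPT
        .isChild(TimelineEntityType.YARN_CONTAINER));        // true
    // matches() compares against the string form of the type.
    System.out.println(TimelineEntityType.YARN_CONTAINER
        .matches("YARN_CONTAINER"));                         // true
  }
}
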
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEvent.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEvent.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEvent.java
index a563658..87fc291 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEvent.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEvent.java
@@ -28,6 +28,11 @@ import javax.xml.bind.annotation.XmlRootElement;
 import java.util.HashMap;
 import java.util.Map;
 
+/**
+ * This class contains the information of an event that belongs to an entity.
+ * Users are free to define what the event means, such as starting an
+ * application or allocating a container.
+ */
 @XmlRootElement(name = "event")
 @XmlAccessorType(XmlAccessType.NONE)
 @InterfaceAudience.Public
@@ -48,8 +53,8 @@ public class TimelineEvent implements Comparable<TimelineEvent> {
     return id;
   }
 
-  public void setId(String id) {
-    this.id = id;
+  public void setId(String eventId) {
+    this.id = eventId;
   }
 
   // required by JAXB
@@ -63,12 +68,12 @@ public class TimelineEvent implements Comparable<TimelineEvent> {
     return info;
   }
 
-  public void setInfo(Map<String, Object> info) {
-    this.info = TimelineServiceHelper.mapCastToHashMap(info);
+  public void setInfo(Map<String, Object> infos) {
+    this.info = TimelineServiceHelper.mapCastToHashMap(infos);
   }
 
-  public void addInfo(Map<String, Object> info) {
-    this.info.putAll(info);
+  public void addInfo(Map<String, Object> infos) {
+    this.info.putAll(infos);
   }
 
   public void addInfo(String key, Object value) {
@@ -80,8 +85,8 @@ public class TimelineEvent implements Comparable<TimelineEvent> {
     return timestamp;
   }
 
-  public void setTimestamp(long timestamp) {
-    this.timestamp = timestamp;
+  public void setTimestamp(long ts) {
+    this.timestamp = ts;
   }
 
   public boolean isValid() {
@@ -97,15 +102,18 @@ public class TimelineEvent implements Comparable<TimelineEvent> {
 
   @Override
   public boolean equals(Object o) {
-    if (this == o)
+    if (this == o) {
       return true;
-    if (!(o instanceof TimelineEvent))
+    }
+    if (!(o instanceof TimelineEvent)) {
       return false;
+    }
 
     TimelineEvent event = (TimelineEvent) o;
 
-    if (timestamp != event.timestamp)
+    if (timestamp != event.timestamp) {
       return false;
+    }
     if (!id.equals(event.id)) {
       return false;
     }

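A minimal usage sketch for the class described in the new javadoc above (not part of the patch; the setters and isValid() used here are the ones shown in this diff, and the event id and info values are illustrative):

import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;

public class TimelineEventSketch {
  public static void main(String[] args) {
    TimelineEvent event = new TimelineEvent();
    event.setId("CONTAINER_ALLOCATED");          // user-defined event id
    event.setTimestamp(System.currentTimeMillis());
    event.addInfo("nodeId", "host1:8041");       // arbitrary key/value detail
    // Expected to be true once the required id and timestamp are set.
    System.out.println(event.isValid());
  }
}
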
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineMetric.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineMetric.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineMetric.java
index e3870da..2f60515 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineMetric.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineMetric.java
@@ -28,12 +28,19 @@ import java.util.Comparator;
 import java.util.Map;
 import java.util.TreeMap;
 
+/**
+ * This class contains the information of a metric that is related to some
+ * entity. A metric can be either a time series or a single value.
+ */
 @XmlRootElement(name = "metric")
 @XmlAccessorType(XmlAccessType.NONE)
 @InterfaceAudience.Public
 @InterfaceStability.Unstable
 public class TimelineMetric {
 
+  /**
+   * Type of metric.
+   */
   public static enum Type {
     SINGLE_VALUE,
     TIME_SERIES
@@ -63,8 +70,8 @@ public class TimelineMetric {
     return type;
   }
 
-  public void setType(Type type) {
-    this.type = type;
+  public void setType(Type metricType) {
+    this.type = metricType;
   }
 
   @XmlElement(name = "id")
@@ -72,8 +79,8 @@ public class TimelineMetric {
     return id;
   }
 
-  public void setId(String id) {
-    this.id = id;
+  public void setId(String metricId) {
+    this.id = metricId;
   }
 
   // required by JAXB
@@ -87,24 +94,24 @@ public class TimelineMetric {
     return values;
   }
 
-  public void setValues(Map<Long, Number> values) {
+  public void setValues(Map<Long, Number> vals) {
     if (type == Type.SINGLE_VALUE) {
-      overwrite(values);
+      overwrite(vals);
     } else {
       if (values != null) {
         this.values = new TreeMap<Long, Number>(reverseComparator);
-        this.values.putAll(values);
+        this.values.putAll(vals);
       } else {
         this.values = null;
       }
     }
   }
 
-  public void addValues(Map<Long, Number> values) {
+  public void addValues(Map<Long, Number> vals) {
     if (type == Type.SINGLE_VALUE) {
-      overwrite(values);
+      overwrite(vals);
     } else {
-      this.values.putAll(values);
+      this.values.putAll(vals);
     }
   }
 
@@ -115,14 +122,14 @@ public class TimelineMetric {
     values.put(timestamp, value);
   }
 
-  private void overwrite(Map<Long, Number> values) {
-    if (values.size() > 1) {
+  private void overwrite(Map<Long, Number> vals) {
+    if (vals.size() > 1) {
       throw new IllegalArgumentException(
           "Values cannot contain more than one point in " +
               Type.SINGLE_VALUE + " mode");
     }
     this.values.clear();
-    this.values.putAll(values);
+    this.values.putAll(vals);
   }
 
   public boolean isValid() {
@@ -139,10 +146,12 @@ public class TimelineMetric {
   // Only check if type and id are equal
   @Override
   public boolean equals(Object o) {
-    if (this == o)
+    if (this == o) {
       return true;
-    if (!(o instanceof TimelineMetric))
+    }
+    if (!(o instanceof TimelineMetric)) {
       return false;
+    }
 
     TimelineMetric m = (TimelineMetric) o;
 

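To make the SINGLE_VALUE versus TIME_SERIES distinction in the class javadoc concrete, a short sketch (not part of the patch; it assumes the no-arg constructor and the addValue(long, Number) method whose body appears in the hunk above):

import java.util.Map;
import java.util.TreeMap;

import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;

public class TimelineMetricSketch {
  public static void main(String[] args) {
    // Time-series metric: values accumulate, keyed by timestamp.
    TimelineMetric cpu = new TimelineMetric();
    cpu.setType(TimelineMetric.Type.TIME_SERIES);
    cpu.setId("CPU");
    cpu.addValue(1000L, 40);
    cpu.addValue(2000L, 55);

    // Single-value metric: setValues()/addValues() overwrite the previous
    // point; passing more than one point hits the IllegalArgumentException
    // thrown by overwrite() above.
    TimelineMetric mem = new TimelineMetric();
    mem.setType(TimelineMetric.Type.SINGLE_VALUE);
    mem.setId("MEMORY");
    Map<Long, Number> point = new TreeMap<>();
    point.put(3000L, 2048);
    mem.setValues(point);
  }
}
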
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineWriteResponse.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineWriteResponse.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineWriteResponse.java
index 4739d8f..eda1ee2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineWriteResponse.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineWriteResponse.java
@@ -29,9 +29,9 @@ import java.util.List;
 
 /**
  * A class that holds a list of put errors. This is the response returned when a
- * list of {@link TimelineEntity} objects is added to the timeline. If there are errors
- * in storing individual entity objects, they will be indicated in the list of
- * errors.
+ * list of {@link TimelineEntity} objects is added to the timeline. If there are
+ * errors in storing individual entity objects, they will be indicated in the
+ * list of errors.
  */
 @XmlRootElement(name = "response")
 @XmlAccessorType(XmlAccessType.NONE)
@@ -46,7 +46,7 @@ public class TimelineWriteResponse {
   }
 
   /**
-   * Get a list of {@link TimelineWriteError} instances
+   * Get a list of {@link TimelineWriteError} instances.
    *
    * @return a list of {@link TimelineWriteError} instances
    */
@@ -56,7 +56,7 @@ public class TimelineWriteResponse {
   }
 
   /**
-   * Add a single {@link TimelineWriteError} instance into the existing list
+   * Add a single {@link TimelineWriteError} instance into the existing list.
    *
    * @param error
    *          a single {@link TimelineWriteError} instance
@@ -66,24 +66,24 @@ public class TimelineWriteResponse {
   }
 
   /**
-   * Add a list of {@link TimelineWriteError} instances into the existing list
+   * Add a list of {@link TimelineWriteError} instances into the existing list.
    *
-   * @param errors
+   * @param writeErrors
    *          a list of {@link TimelineWriteError} instances
    */
-  public void addErrors(List<TimelineWriteError> errors) {
-    this.errors.addAll(errors);
+  public void addErrors(List<TimelineWriteError> writeErrors) {
+    this.errors.addAll(writeErrors);
   }
 
   /**
-   * Set the list to the given list of {@link TimelineWriteError} instances
+   * Set the list to the given list of {@link TimelineWriteError} instances.
    *
-   * @param errors
+   * @param writeErrors
    *          a list of {@link TimelineWriteError} instances
    */
-  public void setErrors(List<TimelineWriteError> errors) {
+  public void setErrors(List<TimelineWriteError> writeErrors) {
     this.errors.clear();
-    this.errors.addAll(errors);
+    this.errors.addAll(writeErrors);
   }
 
   /**
@@ -106,7 +106,7 @@ public class TimelineWriteResponse {
     private int errorCode;
 
     /**
-     * Get the entity Id
+     * Get the entity Id.
      *
      * @return the entity Id
      */
@@ -116,17 +116,16 @@ public class TimelineWriteResponse {
     }
 
     /**
-     * Set the entity Id
+     * Set the entity Id.
      *
-     * @param entityId
-     *          the entity Id
+     * @param id the entity Id.
      */
-    public void setEntityId(String entityId) {
-      this.entityId = entityId;
+    public void setEntityId(String id) {
+      this.entityId = id;
     }
 
     /**
-     * Get the entity type
+     * Get the entity type.
      *
      * @return the entity type
      */
@@ -136,17 +135,16 @@ public class TimelineWriteResponse {
     }
 
     /**
-     * Set the entity type
+     * Set the entity type.
      *
-     * @param entityType
-     *          the entity type
+     * @param type the entity type.
      */
-    public void setEntityType(String entityType) {
-      this.entityType = entityType;
+    public void setEntityType(String type) {
+      this.entityType = type;
     }
 
     /**
-     * Get the error code
+     * Get the error code.
      *
      * @return an error code
      */
@@ -156,13 +154,12 @@ public class TimelineWriteResponse {
     }
 
     /**
-     * Set the error code to the given error code
+     * Set the error code to the given error code.
      *
-     * @param errorCode
-     *          an error code
+     * @param code an error code.
      */
-    public void setErrorCode(int errorCode) {
-      this.errorCode = errorCode;
+    public void setErrorCode(int code) {
+      this.errorCode = code;
     }
 
   }

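A small sketch of how a caller would build and inspect the per-entity errors this response carries (not part of the patch; only the getters, setters and addError shown in this diff are used, and the id and error code values are illustrative):

import org.apache.hadoop.yarn.api.records.timelineservice.TimelineWriteResponse;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineWriteResponse.TimelineWriteError;

public class WriteResponseSketch {
  public static void main(String[] args) {
    TimelineWriteResponse response = new TimelineWriteResponse();
    TimelineWriteError error = new TimelineWriteError();
    error.setEntityId("application_1454620552821_0001");  // illustrative id
    error.setEntityType("YARN_APPLICATION");
    error.setErrorCode(1);                                 // illustrative code
    response.addError(error);
    for (TimelineWriteError e : response.getErrors()) {
      System.out.println(e.getEntityId() + "/" + e.getEntityType()
          + " failed with code " + e.getErrorCode());
    }
  }
}
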
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/UserEntity.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/UserEntity.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/UserEntity.java
index a229fd8..ced57c6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/UserEntity.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/UserEntity.java
@@ -20,6 +20,9 @@ package org.apache.hadoop.yarn.api.records.timelineservice;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
+/**
+ * This entity represents a user.
+ */
 @InterfaceAudience.Public
 @InterfaceStability.Unstable
 public class UserEntity extends TimelineEntity {
@@ -30,7 +33,8 @@ public class UserEntity extends TimelineEntity {
   public UserEntity(TimelineEntity entity) {
     super(entity);
     if (!entity.getType().equals(TimelineEntityType.YARN_USER.toString())) {
-      throw new IllegalArgumentException("Incompatible entity type: " + getId());
+      throw new IllegalArgumentException("Incompatible entity type: "
+          + getId());
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/package-info.java
index 89a9e9b..c43bd62 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/package-info.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/package-info.java
@@ -15,7 +15,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-@InterfaceAudience.Public package org.apache.hadoop.yarn.api.records.timelineservice;
+
+/**
+ * Package org.apache.hadoop.yarn.api.records.timelineservice contains classes
+ * which define the data model for ATSv2.
+ */
+@InterfaceAudience.Public
+package org.apache.hadoop.yarn.api.records.timelineservice;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 6a12d8f..9b43fbd 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -401,7 +401,8 @@ public class YarnConfiguration extends Configuration {
    */
   public static final String RM_SYSTEM_METRICS_PUBLISHER_ENABLED = RM_PREFIX
       + "system-metrics-publisher.enabled";
-  public static final boolean DEFAULT_RM_SYSTEM_METRICS_PUBLISHER_ENABLED = false;
+  public static final boolean DEFAULT_RM_SYSTEM_METRICS_PUBLISHER_ENABLED =
+      false;
 
   /**
    *  The setting that controls whether yarn system metrics is published on the
@@ -423,8 +424,8 @@ public class YarnConfiguration extends Configuration {
 
   public static final String RM_SYSTEM_METRICS_PUBLISHER_DISPATCHER_POOL_SIZE =
       RM_PREFIX + "system-metrics-publisher.dispatcher.pool-size";
-  public static final int DEFAULT_RM_SYSTEM_METRICS_PUBLISHER_DISPATCHER_POOL_SIZE =
-      10;
+  public static final int
+      DEFAULT_RM_SYSTEM_METRICS_PUBLISHER_DISPATCHER_POOL_SIZE = 10;
 
   //RM delegation token related keys
   public static final String RM_DELEGATION_KEY_UPDATE_INTERVAL_KEY =
@@ -1638,7 +1639,7 @@ public class YarnConfiguration extends Configuration {
   public static final String TIMELINE_SERVICE_UI_WEB_PATH_PREFIX =
       TIMELINE_SERVICE_PREFIX + "ui-web-path.";
 
-  /** Timeline client settings */
+  /** Timeline client settings. */
   public static final String TIMELINE_SERVICE_CLIENT_PREFIX =
       TIMELINE_SERVICE_PREFIX + "client.";
 

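For the system-metrics-publisher keys touched above, a minimal sketch of flipping the setting programmatically (not part of the patch; it uses only the constants shown here plus the standard Configuration setters):

import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class MetricsPublisherConfigSketch {
  public static void main(String[] args) {
    YarnConfiguration conf = new YarnConfiguration();
    // The default is false, per DEFAULT_RM_SYSTEM_METRICS_PUBLISHER_ENABLED.
    conf.setBoolean(YarnConfiguration.RM_SYSTEM_METRICS_PUBLISHER_ENABLED,
        true);
    System.out.println(conf.getBoolean(
        YarnConfiguration.RM_SYSTEM_METRICS_PUBLISHER_ENABLED,
        YarnConfiguration.DEFAULT_RM_SYSTEM_METRICS_PUBLISHER_ENABLED));
  }
}
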
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java
index 59ac5b3..ade4f9a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java
@@ -53,7 +53,7 @@ public abstract class TimelineClient extends AbstractService {
    * construct and initialize a timeline client if the following operations are
    * supposed to be conducted by that user.
    */
-  protected ApplicationId contextAppId;
+  private ApplicationId contextAppId;
 
   /**
    * Creates an instance of the timeline v.1.x client.
@@ -76,7 +76,7 @@ public abstract class TimelineClient extends AbstractService {
   @Private
   protected TimelineClient(String name, ApplicationId appId) {
     super(name);
-    contextAppId = appId;
+    setContextAppId(appId);
   }
 
   /**
@@ -240,11 +240,18 @@ public abstract class TimelineClient extends AbstractService {
 
   /**
    * <p>
-   * Update the timeline service address where the request will be sent to
+   * Update the timeline service address where the request will be sent to.
    * </p>
    * @param address
    *          the timeline service address
    */
   public abstract void setTimelineServiceAddress(String address);
 
+  protected ApplicationId getContextAppId() {
+    return contextAppId;
+  }
+
+  protected void setContextAppId(ApplicationId appId) {
+    this.contextAppId = appId;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
index 3e63242..a158a56 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
@@ -390,8 +390,8 @@ public class TimelineClientImpl extends TimelineClient {
       entitiesContainer.addEntity(entity);
     }
     MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-    if (contextAppId != null) {
-      params.add("appid", contextAppId.toString());
+    if (getContextAppId() != null) {
+      params.add("appid", getContextAppId().toString());
     }
     if (async) {
       params.add("async", Boolean.TRUE.toString());
@@ -429,8 +429,7 @@ public class TimelineClientImpl extends TimelineClient {
         URI uri = constructResURI(getConfig(), timelineServiceAddress, true);
         putObjects(uri, path, params, obj);
         needRetry = false;
-      }
-      catch (Exception e) {
+      } catch (Exception e) {
         // TODO only handle exception for timelineServiceAddress being updated.
         // skip retry for other exceptions.
         checkRetryWithSleep(retries, e);
@@ -453,10 +452,9 @@ public class TimelineClientImpl extends TimelineClient {
         Thread.currentThread().interrupt();
       }
     } else {
-      LOG.error(
-        "TimelineClient has reached to max retry times :" +
-        this.maxServiceRetries + " for service address: " +
-        timelineServiceAddress);
+      LOG.error("TimelineClient has reached to max retry times :" +
+          this.maxServiceRetries + " for service address: " +
+          timelineServiceAddress);
       if (e instanceof YarnException) {
         throw (YarnException)e;
       } else if (e instanceof IOException) {
@@ -634,7 +632,7 @@ public class TimelineClientImpl extends TimelineClient {
   }
 
   /**
-   * Poll TimelineServiceAddress for maximum of retries times if it is null
+   * Poll TimelineServiceAddress for a maximum of retries times if it is null.
    * @param retries
    * @return the left retry times
    */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java
index 64aa7d4..53f5af2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java
@@ -45,8 +45,10 @@ import org.codehaus.jackson.map.ObjectMapper;
 public class TimelineUtils {
 
   public static final String FLOW_NAME_TAG_PREFIX = "TIMELINE_FLOW_NAME_TAG";
-  public static final String FLOW_VERSION_TAG_PREFIX = "TIMELINE_FLOW_VERSION_TAG";
-  public static final String FLOW_RUN_ID_TAG_PREFIX = "TIMELINE_FLOW_RUN_ID_TAG";
+  public static final String FLOW_VERSION_TAG_PREFIX =
+      "TIMELINE_FLOW_VERSION_TAG";
+  public static final String FLOW_RUN_ID_TAG_PREFIX =
+      "TIMELINE_FLOW_RUN_ID_TAG";
 
   private static ObjectMapper mapper;
 
@@ -125,38 +127,39 @@ public class TimelineUtils {
     return SecurityUtil.buildTokenService(timelineServiceAddr);
   }
 
-  public static String generateDefaultFlowNameBasedOnAppId(ApplicationId appId) {
+  public static String generateDefaultFlowNameBasedOnAppId(
+      ApplicationId appId) {
     return "flow_" + appId.getClusterTimestamp() + "_" + appId.getId();
   }
 
   /**
-   * Generate flow name tag
+   * Generate flow name tag.
    *
    * @param flowName flow name that identifies a distinct flow application which
    *                 can be run repeatedly over time
-   * @return
+   * @return flow name tag.
    */
   public static String generateFlowNameTag(String flowName) {
     return FLOW_NAME_TAG_PREFIX + ":" + flowName;
   }
 
   /**
-   * Generate flow version tag
+   * Generate flow version tag.
    *
    * @param flowVersion flow version that keeps track of the changes made to the
    *                    flow
-   * @return
+   * @return flow version tag.
    */
   public static String generateFlowVersionTag(String flowVersion) {
     return FLOW_VERSION_TAG_PREFIX + ":" + flowVersion;
   }
 
   /**
-   * Generate flow run ID tag
+   * Generate flow run ID tag.
    *
    * @param flowRunId flow run ID that identifies one instance (or specific
    *                  execution) of that flow
-   * @return
+   * @return flow run id tag.
    */
   public static String generateFlowRunIdTag(long flowRunId) {
     return FLOW_RUN_ID_TAG_PREFIX + ":" + flowRunId;

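The flow tag helpers documented above produce the application tags that carry flow context; a quick sketch of their output (not part of the patch; the cluster timestamp, run id and version values are illustrative):

import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.util.timeline.TimelineUtils;

public class FlowTagSketch {
  public static void main(String[] args) {
    ApplicationId appId = ApplicationId.newInstance(1454620552821L, 1);
    // Default flow name falls back to "flow_<clusterTimestamp>_<id>".
    String flowName = TimelineUtils.generateDefaultFlowNameBasedOnAppId(appId);
    // Each tag is "<PREFIX>:<value>", e.g. "TIMELINE_FLOW_NAME_TAG:...".
    System.out.println(TimelineUtils.generateFlowNameTag(flowName));
    System.out.println(TimelineUtils.generateFlowVersionTag("1"));
    System.out.println(TimelineUtils.generateFlowRunIdTag(1454620552821L));
  }
}
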
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
index 596b5d3..912003a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
@@ -457,7 +457,7 @@ public class NodeManager extends CompositeService
     protected final ConcurrentMap<ContainerId, Container> containers =
         new ConcurrentSkipListMap<ContainerId, Container>();
 
-    protected Map<ApplicationId, String> registeredCollectors;
+    private Map<ApplicationId, String> registeredCollectors;
 
     protected final ConcurrentMap<ContainerId,
         org.apache.hadoop.yarn.api.records.Container> increasedContainers =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/collectormanager/NMCollectorService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/collectormanager/NMCollectorService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/collectormanager/NMCollectorService.java
index 3ba81ce..548c861 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/collectormanager/NMCollectorService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/collectormanager/NMCollectorService.java
@@ -52,12 +52,11 @@ public class NMCollectorService extends CompositeService implements
 
   private static final Log LOG = LogFactory.getLog(NMCollectorService.class);
 
-  final Context context;
+  private final Context context;
 
   private Server server;
 
   public NMCollectorService(Context context) {
-
     super(NMCollectorService.class.getName());
     this.context = context;
   }
@@ -123,7 +122,8 @@ public class NMCollectorService extends CompositeService implements
           client.setTimelineServiceAddress(collectorAddr);
         }
       }
-      ((NodeManager.NMContext)context).addRegisteredCollectors(newCollectorsMap);
+      ((NodeManager.NMContext)context).addRegisteredCollectors(
+          newCollectorsMap);
     }
 
     return ReportNewCollectorInfoResponse.newInstance();
@@ -139,6 +139,7 @@ public class NMCollectorService extends CompositeService implements
           " doesn't exist on NM.");
     }
     return GetTimelineCollectorContextResponse.newInstance(
-        app.getUser(), app.getFlowName(), app.getFlowVersion(), app.getFlowRunId());
+        app.getUser(), app.getFlowName(), app.getFlowVersion(),
+        app.getFlowRunId());
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/collectormanager/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/collectormanager/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/collectormanager/package-info.java
new file mode 100644
index 0000000..7bf597b
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/collectormanager/package-info.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Package org.apache.hadoop.yarn.server.nodemanager.collectormanager contains
+ * classes for handling timeline collector information.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+package org.apache.hadoop.yarn.server.nodemanager.collectormanager;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelineEvent.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelineEvent.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelineEvent.java
index af8d94c..f275b37 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelineEvent.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelineEvent.java
@@ -20,6 +20,10 @@ package org.apache.hadoop.yarn.server.nodemanager.timelineservice;
 
 import org.apache.hadoop.yarn.event.AbstractEvent;
 
+/**
+ * Event posted to NMTimelinePublisher which in turn publishes it to
+ * timelineservice v2.
+ */
 public class NMTimelineEvent extends AbstractEvent<NMTimelineEventType> {
   public NMTimelineEvent(NMTimelineEventType type) {
     super(type);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelineEventType.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelineEventType.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelineEventType.java
index c1129af..b4ae45a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelineEventType.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelineEventType.java
@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.yarn.server.nodemanager.timelineservice;
 
+/**
+ * Type of {@link NMTimelineEvent}.
+ */
 public enum NMTimelineEventType {
   // Publish the NM Timeline entity
   TIMELINE_ENTITY_PUBLISH,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java
index affaaae..684feaa 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java
@@ -127,7 +127,8 @@ public class NMTimelinePublisher extends CompositeService {
         memoryMetric.addValue(currentTimeMillis, pmemUsage);
         entity.addMetric(memoryMetric);
       }
-      if (cpuUsageTotalCoresPercentage != ResourceCalculatorProcessTree.UNAVAILABLE) {
+      if (cpuUsageTotalCoresPercentage !=
+          ResourceCalculatorProcessTree.UNAVAILABLE) {
         TimelineMetric cpuMetric = new TimelineMetric();
         cpuMetric.setId(ContainerMetric.CPU.toString() + pId);
         cpuMetric.addValue(currentTimeMillis, cpuUsageTotalCoresPercentage);
@@ -189,7 +190,8 @@ public class NMTimelinePublisher extends CompositeService {
     putEntity(entity, containerId.getApplicationAttemptId().getApplicationId());
   }
 
-  private static ContainerEntity createContainerEntity(ContainerId containerId) {
+  private static ContainerEntity createContainerEntity(
+      ContainerId containerId) {
     ContainerEntity entity = new ContainerEntity();
     entity.setId(containerId.toString());
     Identifier parentIdentifier = new Identifier();
@@ -214,6 +216,7 @@ public class NMTimelinePublisher extends CompositeService {
     }
   }
 
+  @SuppressWarnings("unchecked")
   public void publishApplicationEvent(ApplicationEvent event) {
     // publish only when the desired event is received
     switch (event.getType()) {
@@ -226,14 +229,14 @@ public class NMTimelinePublisher extends CompositeService {
 
     default:
       if (LOG.isDebugEnabled()) {
-        LOG.debug(event.getType()
-            + " is not a desired ApplicationEvent which needs to be published by"
-            + " NMTimelinePublisher");
+        LOG.debug(event.getType() + " is not a desired ApplicationEvent which"
+            + " needs to be published by NMTimelinePublisher");
       }
       break;
     }
   }
 
+  @SuppressWarnings("unchecked")
   public void publishContainerEvent(ContainerEvent event) {
     // publish only when the desired event is received
     switch (event.getType()) {
@@ -251,6 +254,7 @@ public class NMTimelinePublisher extends CompositeService {
     }
   }
 
+  @SuppressWarnings("unchecked")
   public void publishLocalizationEvent(LocalizationEvent event) {
     // publish only when the desired event is received
     switch (event.getType()) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/package-info.java
new file mode 100644
index 0000000..66233fd
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/package-info.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Package org.apache.hadoop.yarn.server.nodemanager.timelineservice contains
+ * classes related to publishing container events and other NM lifecycle events
+ * to ATSv2.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+package org.apache.hadoop.yarn.server.nodemanager.timelineservice;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;


[2/4] hadoop git commit: YARN-4409. Fix javadoc and checkstyle issues in timelineservice code (Varun Saxena via sjlee)

Posted by sj...@apache.org.
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineWriter.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineWriter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineWriter.java
index 50136de..663a18a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineWriter.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineWriter.java
@@ -42,12 +42,13 @@ public interface TimelineWriter extends Service {
    * @param userId context user ID
    * @param flowName context flow name
    * @param flowVersion context flow version
-   * @param flowRunId
-   * @param appId context app ID
+   * @param flowRunId run id for the flow.
+   * @param appId context app ID.
    * @param data
    *          a {@link TimelineEntities} object.
    * @return a {@link TimelineWriteResponse} object.
-   * @throws IOException
+   * @throws IOException if there is any exception encountered while storing
+   *     or writing entities to the backend storage.
    */
   TimelineWriteResponse write(String clusterId, String userId,
       String flowName, String flowVersion, long flowRunId, String appId,
@@ -65,8 +66,11 @@ public interface TimelineWriter extends Service {
    *          a {@link TimelineEntity} object
    *          a {@link TimelineAggregationTrack} enum
    *          value.
+   * @param track Specifies the track or dimension along which aggregation
+   *     would occur, such as USER, FLOW or QUEUE.
    * @return a {@link TimelineWriteResponse} object.
-   * @throws IOException
+   * @throws IOException if there is any exception encountered while aggregating
+   *     entities to the backend storage.
    */
   TimelineWriteResponse aggregate(TimelineEntity data,
       TimelineAggregationTrack track) throws IOException;
@@ -76,7 +80,8 @@ public interface TimelineWriter extends Service {
    * written to the storage when the method returns. This may be a potentially
    * time-consuming operation, and should be used judiciously.
    *
-   * @throws IOException
+   * @throws IOException if there is any exception encountered while flushing
+   *     entities to the backend storage.
    */
   void flush() throws IOException;
 }
\ No newline at end of file

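Pulling the write/flush contract above together, a hedged usage sketch against the interface (not part of the patch; it assumes TimelineEntities.addEntity(TimelineEntity), leaves the concrete writer implementation to the caller, and uses illustrative context values):

import java.io.IOException;

import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineWriteResponse;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineWriter;

public class TimelineWriterUsageSketch {
  // Writes one entity under the given context and reports per-entity errors.
  static void writeOne(TimelineWriter writer, TimelineEntity entity)
      throws IOException {
    TimelineEntities entities = new TimelineEntities();
    entities.addEntity(entity);
    TimelineWriteResponse response = writer.write("test-cluster", "user1",
        "flow_name", "1", 1002345678919L, "application_1002345678919_0001",
        entities);
    System.out.println("write errors: " + response.getErrors().size());
    // flush() blocks until buffered entities reach the backend storage.
    writer.flush();
  }
}
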
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java
index c03c9b6..5734389 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java
@@ -34,7 +34,7 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute;
 public enum ApplicationColumn implements Column<ApplicationTable> {
 
   /**
-   * App id
+   * App id.
    */
   ID(ApplicationColumnFamily.INFO, "id"),
 
@@ -84,7 +84,7 @@ public enum ApplicationColumn implements Column<ApplicationTable> {
   /**
    * Retrieve an {@link ApplicationColumn} given a name, or null if there is no
    * match. The following holds true: {@code columnFor(x) == columnFor(y)} if
-   * and only if {@code x.equals(y)} or {@code (x == y == null)}
+   * and only if {@code x.equals(y)} or {@code (x == y == null)}.
    *
    * @param columnQualifier Name of the column to retrieve
    * @return the corresponding {@link ApplicationColumn} or null

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnPrefix.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnPrefix.java
index 056e51f..9120f3d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnPrefix.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnPrefix.java
@@ -54,7 +54,7 @@ public enum ApplicationColumnPrefix implements ColumnPrefix<ApplicationTable> {
   INFO(ApplicationColumnFamily.INFO, "i"),
 
   /**
-   * Lifecycle events for an application
+   * Lifecycle events for an application.
    */
   EVENT(ApplicationColumnFamily.INFO, "e"),
 
@@ -214,7 +214,7 @@ public enum ApplicationColumnPrefix implements ColumnPrefix<ApplicationTable> {
    *         is to facilitate returning byte arrays of values that were not
    *         Strings. If they can be treated as Strings, you should use
    *         {@link #readResults(Result)} instead.
-   * @throws IOException
+   * @throws IOException if any problem occurs while reading results.
    */
   public Map<?, Object> readResultsHavingCompoundColumnQualifiers(Result result)
       throws IOException {
@@ -276,8 +276,8 @@ public enum ApplicationColumnPrefix implements ColumnPrefix<ApplicationTable> {
     for (ApplicationColumnPrefix acp : ApplicationColumnPrefix.values()) {
       // Find a match based column family and on name.
       if (acp.columnFamily.equals(columnFamily)
-          && (((columnPrefix == null) && (acp.getColumnPrefix() == null)) || (acp
-              .getColumnPrefix().equals(columnPrefix)))) {
+          && (((columnPrefix == null) && (acp.getColumnPrefix() == null)) ||
+          (acp.getColumnPrefix().equals(columnPrefix)))) {
         return acp;
       }
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationRowKey.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationRowKey.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationRowKey.java
index 3c182b9..ad2aa7a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationRowKey.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationRowKey.java
@@ -62,11 +62,11 @@ public class ApplicationRowKey {
 
   /**
    * Constructs a row key prefix for the application table as follows:
-   * {@code clusterId!userName!flowName!}
+   * {@code clusterId!userName!flowName!}.
    *
-   * @param clusterId
-   * @param userId
-   * @param flowName
+   * @param clusterId Cluster Id.
+   * @param userId User Id.
+   * @param flowName Flow Name.
    * @return byte array with the row key prefix
    */
   public static byte[] getRowKeyPrefix(String clusterId, String userId,
@@ -78,12 +78,12 @@ public class ApplicationRowKey {
 
   /**
    * Constructs a row key prefix for the application table as follows:
-   * {@code clusterId!userName!flowName!flowRunId!}
+   * {@code clusterId!userName!flowName!flowRunId!}.
    *
-   * @param clusterId
-   * @param userId
-   * @param flowName
-   * @param flowRunId
+   * @param clusterId Cluster Id.
+   * @param userId User Id.
+   * @param flowName Flow Name.
+   * @param flowRunId Run Id for the flow.
    * @return byte array with the row key prefix
    */
   public static byte[] getRowKeyPrefix(String clusterId, String userId,
@@ -96,13 +96,13 @@ public class ApplicationRowKey {
 
   /**
    * Constructs a row key for the application table as follows:
-   * {@code clusterId!userName!flowName!flowRunId!AppId}
+   * {@code clusterId!userName!flowName!flowRunId!AppId}.
    *
-   * @param clusterId
-   * @param userId
-   * @param flowName
-   * @param flowRunId
-   * @param appId
+   * @param clusterId Cluster Id.
+   * @param userId User Id.
+   * @param flowName Flow Name.
+   * @param flowRunId Run Id for the flow.
+   * @param appId App Id.
    * @return byte array with the row key
    */
   public static byte[] getRowKey(String clusterId, String userId,
@@ -119,6 +119,9 @@ public class ApplicationRowKey {
 
   /**
    * Given the raw row key as bytes, returns the row key as an object.
+   *
+   * @param rowKey Byte representation of the row key.
+   * @return An <cite>ApplicationRowKey</cite> object.
    */
   public static ApplicationRowKey parseRowKey(byte[] rowKey) {
     byte[][] rowKeyComponents = Separator.QUALIFIERS.split(rowKey);

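The row key layouts spelled out in the javadoc above can be exercised directly through the static helpers (not part of the patch; the cluster, user, flow and app values are illustrative and the flow run id is passed as a long literal):

import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKey;

public class ApplicationRowKeySketch {
  public static void main(String[] args) {
    // Prefix layout: clusterId!userName!flowName!
    byte[] prefix = ApplicationRowKey.getRowKeyPrefix("test-cluster", "user1",
        "flow_name");
    // Full key layout: clusterId!userName!flowName!flowRunId!AppId
    byte[] rowKey = ApplicationRowKey.getRowKey("test-cluster", "user1",
        "flow_name", 1002345678919L, "application_1002345678919_0001");
    System.out.println("prefix bytes: " + prefix.length
        + ", full key bytes: " + rowKey.length);
  }
}
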
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTable.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTable.java
index 681c200..a02f768 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTable.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationTable.java
@@ -68,28 +68,28 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineHBas
  * </pre>
  */
 public class ApplicationTable extends BaseTable<ApplicationTable> {
-  /** application prefix */
+  /** application prefix. */
   private static final String PREFIX =
       YarnConfiguration.TIMELINE_SERVICE_PREFIX + ".application";
 
-  /** config param name that specifies the application table name */
+  /** config param name that specifies the application table name. */
   public static final String TABLE_NAME_CONF_NAME = PREFIX + ".table.name";
 
   /**
    * config param name that specifies the TTL for metrics column family in
-   * application table
+   * application table.
    */
   private static final String METRICS_TTL_CONF_NAME = PREFIX
       + ".table.metrics.ttl";
 
-  /** default value for application table name */
+  /** default value for application table name. */
   private static final String DEFAULT_TABLE_NAME =
       "timelineservice.application";
 
-  /** default TTL is 30 days for metrics timeseries */
+  /** default TTL is 30 days for metrics timeseries. */
   private static final int DEFAULT_METRICS_TTL = 2592000;
 
-  /** default max number of versions */
+  /** default max number of versions. */
   private static final int DEFAULT_METRICS_MAX_VERSIONS = 1000;
 
   private static final Log LOG = LogFactory.getLog(ApplicationTable.class);
@@ -139,8 +139,8 @@ public class ApplicationTable extends BaseTable<ApplicationTable> {
     metricsCF.setMaxVersions(DEFAULT_METRICS_MAX_VERSIONS);
     metricsCF.setTimeToLive(hbaseConf.getInt(METRICS_TTL_CONF_NAME,
         DEFAULT_METRICS_TTL));
-    applicationTableDescp
-        .setRegionSplitPolicyClassName("org.apache.hadoop.hbase.regionserver.KeyPrefixRegionSplitPolicy");
+    applicationTableDescp.setRegionSplitPolicyClassName(
+        "org.apache.hadoop.hbase.regionserver.KeyPrefixRegionSplitPolicy");
     applicationTableDescp.setValue("KeyPrefixRegionSplitPolicy.prefix_length",
         TimelineHBaseSchemaConstants.USERNAME_SPLIT_KEY_PREFIX_LENGTH);
     admin.createTable(applicationTableDescp,

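The table description above defines its config keys under the yarn.timeline-service prefix; a minimal sketch of overriding the table name (not part of the patch; only the public TABLE_NAME_CONF_NAME constant and plain Configuration calls are used, and the replacement table name is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTable;

public class ApplicationTableConfigSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Overrides the default table name "timelineservice.application".
    conf.set(ApplicationTable.TABLE_NAME_CONF_NAME,
        "prod.timelineservice.application");
    System.out.println(conf.get(ApplicationTable.TABLE_NAME_CONF_NAME));
  }
}
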
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java
index c60e6f5..eda14e6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java
@@ -16,6 +16,10 @@
  * limitations under the License.
  */
 
+/**
+ * Package org.apache.hadoop.yarn.server.timelineservice.storage.application
+ * contains classes related to implementation for application table.
+ */
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 package org.apache.hadoop.yarn.server.timelineservice.storage.application;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumn.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumn.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumn.java
index 7f1ecaf..a8e1c66 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumn.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumn.java
@@ -35,17 +35,17 @@ import java.io.IOException;
 public enum AppToFlowColumn implements Column<AppToFlowTable> {
 
   /**
-   * The flow ID
+   * The flow ID.
    */
   FLOW_ID(AppToFlowColumnFamily.MAPPING, "flow_id"),
 
   /**
-   * The flow run ID
+   * The flow run ID.
    */
   FLOW_RUN_ID(AppToFlowColumnFamily.MAPPING, "flow_run_id"),
 
   /**
-   * The user
+   * The user.
    */
   USER_ID(AppToFlowColumnFamily.MAPPING, "user_id");
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumnFamily.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumnFamily.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumnFamily.java
index d722d15..f3f045e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumnFamily.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowColumnFamily.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator;
  */
 public enum AppToFlowColumnFamily implements ColumnFamily<AppToFlowTable> {
   /**
-   * Mapping column family houses known columns such as flowName and flowRunId
+   * Mapping column family houses known columns such as flowName and flowRunId.
    */
   MAPPING("m");
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowRowKey.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowRowKey.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowRowKey.java
index 133952e..3085bb1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowRowKey.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowRowKey.java
@@ -43,10 +43,10 @@ public class AppToFlowRowKey {
 
   /**
    * Constructs a row key prefix for the app_flow table as follows:
-   * {@code clusterId!AppId}
+   * {@code clusterId!AppId}.
    *
-   * @param clusterId
-   * @param appId
+   * @param clusterId Cluster Id.
+   * @param appId Application Id.
    * @return byte array with the row key
    */
   public static byte[] getRowKey(String clusterId, String appId) {
@@ -57,6 +57,9 @@ public class AppToFlowRowKey {
 
   /**
    * Given the raw row key as bytes, returns the row key as an object.
+   *
+   * @param rowKey a rowkey represented as a byte array.
+   * @return an <cite>AppToFlowRowKey</cite> object.
    */
   public static AppToFlowRowKey parseRowKey(byte[] rowKey) {
     byte[][] rowKeyComponents = Separator.QUALIFIERS.split(rowKey);
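
A quick round-trip sketch for the row key helpers above. The getClusterId()/getAppId() accessors are assumed from the class's usual shape and are not shown in this hunk; the cluster and application ids are made up.

import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowRowKey;

public class AppToFlowRowKeySketch {
  public static void main(String[] args) {
    // Compose the clusterId!AppId key, then parse it back.
    byte[] rowKey = AppToFlowRowKey.getRowKey("test-cluster",
        "application_1454000000000_0001");
    AppToFlowRowKey parsed = AppToFlowRowKey.parseRowKey(rowKey);
    System.out.println(parsed.getClusterId() + " / " + parsed.getAppId());
  }
}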

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTable.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTable.java
index 868c6f3..301cf99 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTable.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/AppToFlowTable.java
@@ -58,14 +58,14 @@ import java.io.IOException;
  * </pre>
  */
 public class AppToFlowTable extends BaseTable<AppToFlowTable> {
-  /** app_flow prefix */
+  /** app_flow prefix. */
   private static final String PREFIX =
       YarnConfiguration.TIMELINE_SERVICE_PREFIX + "app-flow";
 
-  /** config param name that specifies the app_flow table name */
+  /** config param name that specifies the app_flow table name. */
   public static final String TABLE_NAME_CONF_NAME = PREFIX + ".table.name";
 
-  /** default value for app_flow table name */
+  /** default value for app_flow table name. */
   private static final String DEFAULT_TABLE_NAME = "timelineservice.app_flow";
 
   private static final Log LOG = LogFactory.getLog(AppToFlowTable.class);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/package-info.java
index df7ffc1..f01d982 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/package-info.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/apptoflow/package-info.java
@@ -15,6 +15,11 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
+/**
+ * Package org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow
+ * contains classes related to implementation for app to flow table.
+ */
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 package org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTable.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTable.java
index 9545438..8581aa4 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTable.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BaseTable.java
@@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.client.Table;
 public abstract class BaseTable<T> {
 
   /**
-   * Name of config variable that is used to point to this table
+   * Name of config variable that is used to point to this table.
    */
   private final String tableNameConfName;
 
@@ -52,6 +52,8 @@ public abstract class BaseTable<T> {
   /**
    * @param tableNameConfName name of config variable that is used to point to
    *          this table.
+   * @param defaultTableName Default table name if table from config is not
+   *          found.
    */
   protected BaseTable(String tableNameConfName, String defaultTableName) {
     this.tableNameConfName = tableNameConfName;
@@ -61,10 +63,11 @@ public abstract class BaseTable<T> {
   /**
    * Used to create a type-safe mutator for this table.
    *
-   * @param hbaseConf used to read table name
+   * @param hbaseConf used to read table name.
    * @param conn used to create a table from.
    * @return a type safe {@link BufferedMutator} for the entity table.
-   * @throws IOException
+   * @throws IOException if any exception occurs while creating mutator for the
+   *     table.
    */
   public TypedBufferedMutator<T> getTableMutator(Configuration hbaseConf,
       Connection conn) throws IOException {
@@ -88,7 +91,7 @@ public abstract class BaseTable<T> {
    * @param conn used to create table from
    * @param scan that specifies what you want to read from this table.
    * @return scanner for the table.
-   * @throws IOException
+   * @throws IOException if any exception occurs while getting the scanner.
    */
   public ResultScanner getResultScanner(Configuration hbaseConf,
       Connection conn, Scan scan) throws IOException {
@@ -102,7 +105,7 @@ public abstract class BaseTable<T> {
    * @param conn used to create table from
    * @param get that specifies what single row you want to get from this table
    * @return result of get operation
-   * @throws IOException
+   * @throws IOException if any exception occurs while getting the result.
    */
   public Result getResult(Configuration hbaseConf, Connection conn, Get get)
       throws IOException {
@@ -113,7 +116,8 @@ public abstract class BaseTable<T> {
   /**
    * Get the table name for this table.
    *
-   * @param hbaseConf
+   * @param hbaseConf HBase configuration from which table name will be fetched.
+   * @return A {@link TableName} object.
    */
   public TableName getTableName(Configuration hbaseConf) {
     TableName table =
@@ -126,8 +130,9 @@ public abstract class BaseTable<T> {
    * Used to create the table in HBase. Should be called only once (per HBase
    * instance).
    *
-   * @param admin
-   * @param hbaseConf
+   * @param admin Used for doing HBase table operations.
+   * @param hbaseConf HBase configuration.
+   * @throws IOException if any exception occurs while creating the table.
    */
   public abstract void createTable(Admin admin, Configuration hbaseConf)
       throws IOException;
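
To make the BaseTable contract above concrete, a hypothetical subclass sketch; the config key, default table name and column family are invented for illustration, and the HBase 1.x descriptor API is assumed.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;

public class ExampleTable extends BaseTable<ExampleTable> {
  public ExampleTable() {
    // Config key used to look up the table name, plus the default fallback.
    super("yarn.timeline-service.example.table.name",
        "timelineservice.example");
  }

  @Override
  public void createTable(Admin admin, Configuration hbaseConf)
      throws IOException {
    HTableDescriptor desc = new HTableDescriptor(getTableName(hbaseConf));
    desc.addFamily(new HColumnDescriptor(Bytes.toBytes("i")));
    admin.createTable(desc);
  }
}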

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BufferedMutatorDelegator.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BufferedMutatorDelegator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BufferedMutatorDelegator.java
index fe8f9c6..cf469a5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BufferedMutatorDelegator.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/BufferedMutatorDelegator.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.client.BufferedMutator;
 import org.apache.hadoop.hbase.client.Mutation;
 
 /**
- * To be used to wrap an actual {@link BufferedMutator} in a type safe manner
+ * To be used to wrap an actual {@link BufferedMutator} in a type safe manner.
  *
  * @param <T> The class referring to the table to be written to.
  */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Column.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Column.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Column.java
index 64c1cda..1f0b48f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Column.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Column.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.yarn.server.timelineservice.storage.common;
 
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute;
 
@@ -39,25 +38,26 @@ public interface Column<T> {
    *          column.
    * @param timestamp version timestamp. When null the server timestamp will be
    *          used.
-   * @param attributes Map of attributes for this mutation. used in the coprocessor
-   *          to set/read the cell tags. Can be null.
+   * @param attributes Map of attributes for this mutation. Used in the
+   *     coprocessor to set/read the cell tags. Can be null.
    * @param inputValue the value to write to the rowKey and column qualifier.
    *          Nothing gets written when null.
-   * @throws IOException
+   * @throws IOException if there is any exception encountered during store.
    */
-  public void store(byte[] rowKey, TypedBufferedMutator<T> tableMutator,
+  void store(byte[] rowKey, TypedBufferedMutator<T> tableMutator,
       Long timestamp, Object inputValue, Attribute... attributes)
       throws IOException;
 
   /**
    * Get the latest version of this specified column. Note: this call clones the
-   * value content of the hosting {@link Cell}.
+   * value content of the hosting {@link org.apache.hadoop.hbase.Cell Cell}.
    *
    * @param result Cannot be null
    * @return result object (can be cast to whatever object was written to), or
    *         null when result doesn't contain this column.
-   * @throws IOException
+   * @throws IOException if there is any exception encountered while reading
+   *     result.
    */
-  public Object readResult(Result result) throws IOException;
+  Object readResult(Result result) throws IOException;
 
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnFamily.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnFamily.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnFamily.java
index c84c016..452adcd 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnFamily.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnFamily.java
@@ -29,6 +29,6 @@ public interface ColumnFamily<T> {
    *
    * @return a clone of the byte representation of the column family.
    */
-  public byte[] getBytes();
+  byte[] getBytes();
 
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java
index 1e63ce5..15bb818 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java
@@ -26,7 +26,6 @@ import java.util.TreeMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -82,7 +81,9 @@ public class ColumnHelper<T> {
    * @param inputValue
    *          the value to write to the rowKey and column qualifier. Nothing
    *          gets written when null.
-   * @throws IOException
+   * @param attributes Attributes to be set for HBase Put.
+   * @throws IOException if any problem occurs during store operation (sending
+   *          mutation to the table).
    */
   public void store(byte[] rowKey, TypedBufferedMutator<?> tableMutator,
       byte[] columnQualifier, Long timestamp, Object inputValue,
@@ -140,13 +141,13 @@ public class ColumnHelper<T> {
 
   /**
    * Get the latest version of this specified column. Note: this call clones the
-   * value content of the hosting {@link Cell}.
+   * value content of the hosting {@link org.apache.hadoop.hbase.Cell Cell}.
    *
    * @param result from which to read the value. Cannot be null
    * @param columnQualifierBytes referring to the column to be read.
    * @return latest version of the specified column of whichever object was
    *         written.
-   * @throws IOException
+   * @throws IOException if any problem occurs while reading result.
    */
   public Object readResult(Result result, byte[] columnQualifierBytes)
       throws IOException {
@@ -167,9 +168,9 @@ public class ColumnHelper<T> {
    *          columns are returned.
    * @param <V> the type of the values. The values will be cast into that type.
    * @return the cell values at each respective time in for form
-   *         {idA={timestamp1->value1}, idA={timestamp2->value2},
-   *         idB={timestamp3->value3}, idC={timestamp1->value4}}
-   * @throws IOException
+   *         {@literal {idA={timestamp1->value1}, idA={timestamp2->value2},
+   *         idB={timestamp3->value3}, idC={timestamp1->value4}}}
+   * @throws IOException if any problem occurs while reading results.
    */
   @SuppressWarnings("unchecked")
   public <V> NavigableMap<String, NavigableMap<Long, V>>
@@ -180,8 +181,9 @@ public class ColumnHelper<T> {
         new TreeMap<String, NavigableMap<Long, V>>();
 
     if (result != null) {
-      NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> resultMap =
-          result.getMap();
+      NavigableMap<
+          byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> resultMap =
+              result.getMap();
 
       NavigableMap<byte[], NavigableMap<Long, byte[]>> columnCellMap =
           resultMap.get(columnFamilyBytes);
@@ -240,7 +242,7 @@ public class ColumnHelper<T> {
    *         back and forth from Strings, you should use
    *         {@link #readResultsHavingCompoundColumnQualifiers(Result, byte[])}
    *         instead.
-   * @throws IOException
+   * @throws IOException if any problem occurs while reading results.
    */
   public Map<String, Object> readResults(Result result,
       byte[] columnPrefixBytes) throws IOException {
@@ -294,7 +296,7 @@ public class ColumnHelper<T> {
    *         non-null column prefix bytes, the column qualifier is returned as
    *         a list of parts, each part a byte[]. This is to facilitate
    *         returning byte arrays of values that were not Strings.
-   * @throws IOException
+   * @throws IOException if any problem occurs while reading results.
    */
   public Map<?, Object> readResultsHavingCompoundColumnQualifiers(Result result,
       byte[] columnPrefixBytes) throws IOException {
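
The nested-map return shape documented for readResultsWithTimestamps above is easier to read with a small traversal sketch; this is plain Java and assumes nothing beyond the documented {qualifier -> {timestamp -> value}} structure.

import java.util.Map;
import java.util.NavigableMap;

public final class ReadResultsShapeSketch {
  private ReadResultsShapeSketch() {
  }

  // Walks the {columnQualifier -> {timestamp -> value}} map returned by
  // readResultsWithTimestamps and prints one line per cell version.
  public static <V> void print(
      NavigableMap<String, NavigableMap<Long, V>> results) {
    for (Map.Entry<String, NavigableMap<Long, V>> column
        : results.entrySet()) {
      for (Map.Entry<Long, V> cell : column.getValue().entrySet()) {
        System.out.println(
            column.getKey() + " @ " + cell.getKey() + " = " + cell.getValue());
      }
    }
  }
}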

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnPrefix.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnPrefix.java
index 0f3ac4e..f221b31 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnPrefix.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnPrefix.java
@@ -21,7 +21,6 @@ import java.io.IOException;
 import java.util.Map;
 import java.util.NavigableMap;
 
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute;
 
@@ -48,7 +47,8 @@ public interface ColumnPrefix<T> {
    *          coprocessor to set/read the cell tags.
    * @param inputValue the value to write to the rowKey and column qualifier.
    *          Nothing gets written when null.
-   * @throws IOException
+   * @throws IOException if there is any exception encountered while doing
+   *     store operation (sending mutation to the table).
    */
   void store(byte[] rowKey, TypedBufferedMutator<T> tableMutator,
       byte[] qualifier, Long timestamp, Object inputValue,
@@ -69,7 +69,8 @@ public interface ColumnPrefix<T> {
    *          coprocessor to set/read the cell tags.
    * @param inputValue the value to write to the rowKey and column qualifier.
    *          Nothing gets written when null.
-   * @throws IOException
+   * @throws IOException if there is any exception encountered while doing
+   *     store operation (sending mutation to the table).
    */
   void store(byte[] rowKey, TypedBufferedMutator<T> tableMutator,
       String qualifier, Long timestamp, Object inputValue,
@@ -77,14 +78,15 @@ public interface ColumnPrefix<T> {
 
   /**
    * Get the latest version of this specified column. Note: this call clones the
-   * value content of the hosting {@link Cell}.
+   * value content of the hosting {@link org.apache.hadoop.hbase.Cell Cell}.
    *
    * @param result Cannot be null
    * @param qualifier column qualifier. Nothing gets read when null.
    * @return result object (can be cast to whatever object was written to) or
    *         null when specified column qualifier for this prefix doesn't exist
    *         in the result.
-   * @throws IOException
+   * @throws IOException if there is any exception encountered while reading
+   *     result.
    */
   Object readResult(Result result, String qualifier) throws IOException;
 
@@ -92,7 +94,8 @@ public interface ColumnPrefix<T> {
    * @param result from which to read columns
    * @return the latest values of columns in the column family with this prefix
    *         (or all of them if the prefix value is null).
-   * @throws IOException
+   * @throws IOException if there is any exception encountered while reading
+   *     results.
    */
   Map<String, Object> readResults(Result result) throws IOException;
 
@@ -100,9 +103,10 @@ public interface ColumnPrefix<T> {
    * @param result from which to reads data with timestamps
    * @param <V> the type of the values. The values will be cast into that type.
    * @return the cell values at each respective time in for form
-   *         {idA={timestamp1->value1}, idA={timestamp2->value2},
-   *         idB={timestamp3->value3}, idC={timestamp1->value4}}
-   * @throws IOException
+   *         {@literal {idA={timestamp1->value1}, idA={timestamp2->value2},
+   *         idB={timestamp3->value3}, idC={timestamp1->value4}}}
+   * @throws IOException if there is any exception encountered while reading
+   *     result.
    */
   <V> NavigableMap<String, NavigableMap<Long, V>>
       readResultsWithTimestamps(Result result) throws IOException;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/LongConverter.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/LongConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/LongConverter.java
index cdb8619..48c56f9 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/LongConverter.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/LongConverter.java
@@ -55,8 +55,9 @@ public final class LongConverter implements NumericValueConverter {
   /**
    * Compares two numbers as longs. If either number is null, it will be taken
    * as 0.
-   * @param num1
-   * @param num2
+   *
+   * @param num1 the first {@code Long} to compare.
+   * @param num2 the second {@code Long} to compare.
    * @return -1 if num1 is less than num2, 0 if num1 is equal to num2 and 1 if
    * num1 is greater than num2.
    */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/NumericValueConverter.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/NumericValueConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/NumericValueConverter.java
index 70964cd..8fb6536 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/NumericValueConverter.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/NumericValueConverter.java
@@ -29,9 +29,10 @@ public interface NumericValueConverter extends ValueConverter,
   /**
    * Adds two or more numbers. If either of the numbers are null, it is taken as
    * 0.
-   * @param num1
-   * @param num2
-   * @param numbers
+   *
+   * @param num1 the first number to add.
+   * @param num2 the second number to add.
+   * @param numbers Rest of the numbers to be added.
    * @return result after adding up the numbers.
    */
   Number add(Number num1, Number num2, Number...numbers);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/OfflineAggregationInfo.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/OfflineAggregationInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/OfflineAggregationInfo.java
index 16c03a3..3dc5f51 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/OfflineAggregationInfo.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/OfflineAggregationInfo.java
@@ -33,21 +33,24 @@ import java.sql.SQLException;
  */
 public final class OfflineAggregationInfo {
   /**
-   * Default flow level aggregation table name
+   * Default flow level aggregation table name.
    */
   @VisibleForTesting
   public static final String FLOW_AGGREGATION_TABLE_NAME
       = "yarn_timeline_flow_aggregation";
   /**
-   * Default user level aggregation table name
+   * Default user level aggregation table name.
    */
   public static final String USER_AGGREGATION_TABLE_NAME
       = "yarn_timeline_user_aggregation";
 
   // These lists are not taking effects in table creations.
-  private static final String[] FLOW_AGGREGATION_PK_LIST =
-      { "user", "cluster", "flow_name" };
-  private static final String[] USER_AGGREGATION_PK_LIST = { "user", "cluster"};
+  private static final String[] FLOW_AGGREGATION_PK_LIST = {
+      "user", "cluster", "flow_name"
+  };
+  private static final String[] USER_AGGREGATION_PK_LIST = {
+      "user", "cluster"
+  };
 
   private final String tableName;
   private final String[] primaryKeyList;
@@ -81,30 +84,32 @@ public final class OfflineAggregationInfo {
 
   public static final OfflineAggregationInfo FLOW_AGGREGATION =
       new OfflineAggregationInfo(FLOW_AGGREGATION_TABLE_NAME,
-          FLOW_AGGREGATION_PK_LIST, new PrimaryKeyStringSetter() {
-        @Override
-        public int setValues(PreparedStatement ps,
-            TimelineCollectorContext context, String[] extraInfo, int startPos)
-            throws SQLException {
-          int idx = startPos;
-          ps.setString(idx++, context.getUserId());
-          ps.setString(idx++, context.getClusterId());
-          ps.setString(idx++, context.getFlowName());
-          return idx;
-        }
-      });
+          FLOW_AGGREGATION_PK_LIST,
+          new PrimaryKeyStringSetter() {
+          @Override
+          public int setValues(PreparedStatement ps,
+              TimelineCollectorContext context, String[] extraInfo,
+              int startPos) throws SQLException {
+            int idx = startPos;
+            ps.setString(idx++, context.getUserId());
+            ps.setString(idx++, context.getClusterId());
+            ps.setString(idx++, context.getFlowName());
+            return idx;
+          }
+        });
 
   public static final OfflineAggregationInfo USER_AGGREGATION =
       new OfflineAggregationInfo(USER_AGGREGATION_TABLE_NAME,
-          USER_AGGREGATION_PK_LIST, new PrimaryKeyStringSetter() {
-        @Override
-        public int setValues(PreparedStatement ps,
-            TimelineCollectorContext context, String[] extraInfo, int startPos)
-            throws SQLException {
-          int idx = startPos;
-          ps.setString(idx++, context.getUserId());
-          ps.setString(idx++, context.getClusterId());
-          return idx;
-        }
-      });
+          USER_AGGREGATION_PK_LIST,
+          new PrimaryKeyStringSetter() {
+          @Override
+          public int setValues(PreparedStatement ps,
+              TimelineCollectorContext context, String[] extraInfo,
+              int startPos) throws SQLException {
+            int idx = startPos;
+            ps.setString(idx++, context.getUserId());
+            ps.setString(idx++, context.getClusterId());
+            return idx;
+          }
+        });
 }
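
The PrimaryKeyStringSetter callbacks above all follow the same pattern: bind the primary-key columns starting at startPos and return the index of the next unused placeholder. A plain-JDBC sketch of that contract (no timelineservice types; names invented for illustration):

import java.sql.PreparedStatement;
import java.sql.SQLException;

public final class PrimaryKeySetterSketch {
  private PrimaryKeySetterSketch() {
  }

  // Binds user/cluster/flow_name starting at startPos and hands back the
  // next free placeholder index so the caller can bind remaining columns.
  public static int setFlowKey(PreparedStatement ps, String user,
      String cluster, String flowName, int startPos) throws SQLException {
    int idx = startPos;
    ps.setString(idx++, user);
    ps.setString(idx++, cluster);
    ps.setString(idx++, flowName);
    return idx;
  }
}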

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Range.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Range.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Range.java
index 2cb6c08..8a2e01a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Range.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Range.java
@@ -20,6 +20,9 @@ package org.apache.hadoop.yarn.server.timelineservice.storage.common;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
+/**
+ * Encapsulates a range with start and end indices.
+ */
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class Range {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Separator.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Separator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Separator.java
index 1e82494..a81c717 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Separator.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/Separator.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 public enum Separator {
 
   /**
-   * separator in key or column qualifier fields
+   * separator in key or column qualifier fields.
    */
   QUALIFIERS("!", "%0$"),
 
@@ -53,7 +53,7 @@ public enum Separator {
   private final String value;
 
   /**
-   * The URLEncoded version of this separator
+   * The URLEncoded version of this separator.
    */
   private final String encodedValue;
 
@@ -63,7 +63,7 @@ public enum Separator {
   private final byte[] bytes;
 
   /**
-   * The value quoted so that it can be used as a safe regex
+   * The value quoted so that it can be used as a safe regex.
    */
   private final String quotedValue;
 
@@ -99,7 +99,7 @@ public enum Separator {
   /**
    * Used to make token safe to be used with this separator without collisions.
    *
-   * @param token
+   * @param token Token to be encoded.
    * @return the token with any occurrences of this separator URLEncoded.
    */
   public String encode(String token) {
@@ -111,7 +111,9 @@ public enum Separator {
   }
 
   /**
-   * @param token
+   * Decode the token encoded using {@link #encode}.
+   *
+   * @param token Token to be decoded.
    * @return the token with any occurrences of the encoded separator replaced by
    *         the separator itself.
    */
@@ -193,7 +195,7 @@ public enum Separator {
    * Returns a single byte array containing all of the individual arrays
    * components separated by this separator.
    *
-   * @param components
+   * @param components Byte array components to be joined together.
    * @return byte array after joining the components
    */
   public byte[] join(byte[]... components) {
@@ -287,8 +289,8 @@ public enum Separator {
   public Collection<String> splitEncoded(String compoundValue) {
     List<String> result = new ArrayList<String>();
     if (compoundValue != null) {
-      for (String value : compoundValue.split(quotedValue)) {
-        result.add(decode(value));
+      for (String val : compoundValue.split(quotedValue)) {
+        result.add(decode(val));
       }
     }
     return result;
@@ -298,6 +300,7 @@ public enum Separator {
    * Splits the source array into multiple array segments using this separator,
    * up to a maximum of count items. This will naturally produce copied byte
    * arrays for each of the split segments.
+   *
    * @param source to be split
    * @param limit on how many segments are supposed to be returned. A
    *          non-positive value indicates no limit on number of segments.
@@ -311,6 +314,7 @@ public enum Separator {
    * Splits the source array into multiple array segments using this separator,
    * as many times as splits are found. This will naturally produce copied byte
    * arrays for each of the split segments.
+   *
    * @param source to be split
    * @return source split by this separator.
    */
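
A short round trip with the QUALIFIERS separator, using only the encode/decode/join/split methods documented in this file; the values are made up.

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator;

public class SeparatorSketch {
  public static void main(String[] args) {
    // Encode so an embedded "!" cannot collide with the separator itself.
    String encoded = Separator.QUALIFIERS.encode("flow!with!bangs");
    byte[] joined = Separator.QUALIFIERS.join(
        Bytes.toBytes("test-cluster"), Bytes.toBytes(encoded));
    byte[][] parts = Separator.QUALIFIERS.split(joined);
    System.out.println(Bytes.toString(parts[0]) + " | "
        + Separator.QUALIFIERS.decode(Bytes.toString(parts[1])));
  }
}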

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineHBaseSchemaConstants.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineHBaseSchemaConstants.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineHBaseSchemaConstants.java
index bbf498a..8e6c259 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineHBaseSchemaConstants.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineHBaseSchemaConstants.java
@@ -24,11 +24,13 @@ import org.apache.hadoop.hbase.util.Bytes;
 /**
  * contains the constants used in the context of schema accesses for
  * {@link org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity}
- * information
+ * information.
  */
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
-public class TimelineHBaseSchemaConstants {
+public final class TimelineHBaseSchemaConstants {
+  private TimelineHBaseSchemaConstants() {
+  }
 
   /**
    * Used to create a pre-split for tables starting with a username in the
@@ -36,27 +38,28 @@ public class TimelineHBaseSchemaConstants {
    * separators) so that different installations can presplit based on their own
    * commonly occurring names.
    */
-  private final static byte[][] USERNAME_SPLITS = { Bytes.toBytes("a"),
-      Bytes.toBytes("ad"), Bytes.toBytes("an"), Bytes.toBytes("b"),
-      Bytes.toBytes("ca"), Bytes.toBytes("cl"), Bytes.toBytes("d"),
-      Bytes.toBytes("e"), Bytes.toBytes("f"), Bytes.toBytes("g"),
-      Bytes.toBytes("h"), Bytes.toBytes("i"), Bytes.toBytes("j"),
-      Bytes.toBytes("k"), Bytes.toBytes("l"), Bytes.toBytes("m"),
-      Bytes.toBytes("n"), Bytes.toBytes("o"), Bytes.toBytes("q"),
-      Bytes.toBytes("r"), Bytes.toBytes("s"), Bytes.toBytes("se"),
-      Bytes.toBytes("t"), Bytes.toBytes("u"), Bytes.toBytes("v"),
-      Bytes.toBytes("w"), Bytes.toBytes("x"), Bytes.toBytes("y"),
-      Bytes.toBytes("z") };
+  private final static byte[][] USERNAME_SPLITS = {
+      Bytes.toBytes("a"), Bytes.toBytes("ad"), Bytes.toBytes("an"),
+      Bytes.toBytes("b"), Bytes.toBytes("ca"), Bytes.toBytes("cl"),
+      Bytes.toBytes("d"), Bytes.toBytes("e"), Bytes.toBytes("f"),
+      Bytes.toBytes("g"), Bytes.toBytes("h"), Bytes.toBytes("i"),
+      Bytes.toBytes("j"), Bytes.toBytes("k"), Bytes.toBytes("l"),
+      Bytes.toBytes("m"), Bytes.toBytes("n"), Bytes.toBytes("o"),
+      Bytes.toBytes("q"), Bytes.toBytes("r"), Bytes.toBytes("s"),
+      Bytes.toBytes("se"), Bytes.toBytes("t"), Bytes.toBytes("u"),
+      Bytes.toBytes("v"), Bytes.toBytes("w"), Bytes.toBytes("x"),
+      Bytes.toBytes("y"), Bytes.toBytes("z")
+  };
 
   /**
-   * The length at which keys auto-split
+   * The length at which keys auto-split.
    */
   public static final String USERNAME_SPLIT_KEY_PREFIX_LENGTH = "4";
 
   /**
    * @return splits for splits where a user is a prefix.
    */
-  public final static byte[][] getUsernameSplits() {
+  public static byte[][] getUsernameSplits() {
     byte[][] kloon = USERNAME_SPLITS.clone();
     // Deep copy.
     for (int row = 0; row < USERNAME_SPLITS.length; row++) {
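
For illustration, how the username splits above are typically applied when creating a pre-split table; the table and column family names are invented and the HBase 1.x Admin API is assumed.

import java.io.IOException;

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineHBaseSchemaConstants;

public final class PreSplitSketch {
  private PreSplitSketch() {
  }

  public static void createPreSplitTable(Admin admin) throws IOException {
    HTableDescriptor desc =
        new HTableDescriptor(TableName.valueOf("timelineservice.example"));
    desc.addFamily(new HColumnDescriptor(Bytes.toBytes("i")));
    // Pre-split on common username prefixes so rows keyed by user spread
    // across regions from the start.
    admin.createTable(desc, TimelineHBaseSchemaConstants.getUsernameSplits());
  }
}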

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineStorageUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineStorageUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineStorageUtils.java
index e30f699..2328bba 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineStorageUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineStorageUtils.java
@@ -45,15 +45,17 @@ import org.apache.hadoop.yarn.util.ConverterUtils;
  */
 @Public
 @Unstable
-public class TimelineStorageUtils {
+public final class TimelineStorageUtils {
+  private TimelineStorageUtils() {
+  }
 
-  /** empty bytes */
+  /** empty bytes. */
   public static final byte[] EMPTY_BYTES = new byte[0];
 
-  /** indicator for no limits for splitting */
+  /** indicator for no limits for splitting. */
   public static final int NO_LIMIT_SPLIT = -1;
 
-  /** milliseconds in one day */
+  /** milliseconds in one day. */
   public static final long MILLIS_ONE_DAY = 86400000L;
 
   /**
@@ -62,9 +64,9 @@ public class TimelineStorageUtils {
    * copied byte arrays for each of the split segments. To identify the split
    * ranges without the array copies, see {@link #splitRanges(byte[], byte[])}.
    *
-   * @param source
-   * @param separator
-   * @return byte[] array after splitting the source
+   * @param source Source array.
+   * @param separator Separator represented as a byte array.
+   * @return byte[][] after splitting the source
    */
   public static byte[][] split(byte[] source, byte[] separator) {
     return split(source, separator, NO_LIMIT_SPLIT);
@@ -76,10 +78,10 @@ public class TimelineStorageUtils {
    * copied byte arrays for each of the split segments. To identify the split
    * ranges without the array copies, see {@link #splitRanges(byte[], byte[])}.
    *
-   * @param source
-   * @param separator
+   * @param source Source array.
+   * @param separator Separator represented as a byte array.
    * @param limit a non-positive value indicates no limit on number of segments.
-   * @return byte[][] after splitting the input source
+   * @return byte[][] after splitting the input source.
    */
   public static byte[][] split(byte[] source, byte[] separator, int limit) {
     List<Range> segments = splitRanges(source, separator, limit);
@@ -100,6 +102,10 @@ public class TimelineStorageUtils {
    * Returns a list of ranges identifying [start, end) -- closed, open --
    * positions within the source byte array that would be split using the
    * separator byte array.
+   *
+   * @param source Source array.
+   * @param separator Separator represented as a byte array.
+   * @return a list of ranges.
    */
   public static List<Range> splitRanges(byte[] source, byte[] separator) {
     return splitRanges(source, separator, NO_LIMIT_SPLIT);
@@ -113,6 +119,7 @@ public class TimelineStorageUtils {
    * @param source the source data
    * @param separator the separator pattern to look for
    * @param limit the maximum number of splits to identify in the source
+   * @return a list of ranges.
    */
   public static List<Range> splitRanges(byte[] source, byte[] separator,
       int limit) {
@@ -132,7 +139,7 @@ public class TimelineStorageUtils {
         // everything else goes in one final segment
         break;
       }
-	      segments.add(new Range(start, i));
+      segments.add(new Range(start, i));
       start = i + separator.length;
       // i will be incremented again in outer for loop
       i += separator.length - 1;
@@ -219,9 +226,9 @@ public class TimelineStorageUtils {
 
   /**
    * returns the timestamp of that day's start (which is midnight 00:00:00 AM)
-   * for a given input timestamp
+   * for a given input timestamp.
    *
-   * @param ts
+   * @param ts Timestamp.
    * @return timestamp of that day's beginning (midnight)
    */
   public static long getTopOfTheDayTimestamp(long ts) {
@@ -233,9 +240,9 @@ public class TimelineStorageUtils {
    * Combines the input array of attributes and the input aggregation operation
    * into a new array of attributes.
    *
-   * @param attributes
-   * @param aggOp
-   * @return array of combined attributes
+   * @param attributes Attributes to be combined.
+   * @param aggOp Aggregation operation.
+   * @return array of combined attributes.
    */
   public static Attribute[] combineAttributes(Attribute[] attributes,
       AggregationOperation aggOp) {
@@ -257,8 +264,8 @@ public class TimelineStorageUtils {
    * Returns a number for the new array size. The new array is the combination
    * of input array of attributes and the input aggregation operation.
    *
-   * @param attributes
-   * @param aggOp
+   * @param attributes Attributes.
+   * @param aggOp Aggregation operation.
    * @return the size for the new array
    */
   private static int getNewLengthCombinedAttributes(Attribute[] attributes,
@@ -283,16 +290,17 @@ public class TimelineStorageUtils {
   }
 
   /**
-   * checks if an application has finished
+   * checks if an application has finished.
    *
-   * @param te
+   * @param te TimelineEntity object.
    * @return true if application has finished else false
    */
   public static boolean isApplicationFinished(TimelineEntity te) {
     SortedSet<TimelineEvent> allEvents = te.getEvents();
     if ((allEvents != null) && (allEvents.size() > 0)) {
       TimelineEvent event = allEvents.last();
-      if (event.getId().equals(ApplicationMetricsConstants.FINISHED_EVENT_TYPE)) {
+      if (event.getId().equals(
+          ApplicationMetricsConstants.FINISHED_EVENT_TYPE)) {
         return true;
       }
     }
@@ -300,26 +308,27 @@ public class TimelineStorageUtils {
   }
 
   /**
-   * get the time at which an app finished
+   * get the time at which an app finished.
    *
-   * @param te
+   * @param te TimelineEntity object.
    * @return true if application has finished else false
    */
   public static long getApplicationFinishedTime(TimelineEntity te) {
     SortedSet<TimelineEvent> allEvents = te.getEvents();
     if ((allEvents != null) && (allEvents.size() > 0)) {
       TimelineEvent event = allEvents.last();
-      if (event.getId().equals(ApplicationMetricsConstants.FINISHED_EVENT_TYPE)) {
+      if (event.getId().equals(
+          ApplicationMetricsConstants.FINISHED_EVENT_TYPE)) {
         return event.getTimestamp();
       }
     }
-    return 0l;
+    return 0L;
   }
 
   /**
    * Checks if the input TimelineEntity object is an ApplicationEntity.
    *
-   * @param te
+   * @param te TimelineEntity object.
    * @return true if input is an ApplicationEntity, false otherwise
    */
   public static boolean isApplicationEntity(TimelineEntity te) {
@@ -329,7 +338,7 @@ public class TimelineStorageUtils {
   /**
    * Checks for the APPLICATION_CREATED event.
    *
-   * @param te
+   * @param te TimelineEntity object.
    * @return true is application event exists, false otherwise
    */
   public static boolean isApplicationCreated(TimelineEntity te) {
@@ -346,9 +355,9 @@ public class TimelineStorageUtils {
 
   /**
    * Returns the first seen aggregation operation as seen in the list of input
-   * tags or null otherwise
+   * tags or null otherwise.
    *
-   * @param tags
+   * @param tags list of HBase tags.
    * @return AggregationOperation
    */
   public static AggregationOperation getAggregationOperationFromTagsList(
@@ -366,8 +375,8 @@ public class TimelineStorageUtils {
   /**
    * Creates a {@link Tag} from the input attribute.
    *
-   * @param attribute
-   * @return Tag
+   * @param attribute Attribute from which the tag is to be created.
+   * @return an HBase Tag.
    */
   public static Tag getTagFromAttribute(Entry<String, byte[]> attribute) {
     // attribute could be either an Aggregation Operation or
@@ -380,8 +389,9 @@ public class TimelineStorageUtils {
       return t;
     }
 
-    AggregationCompactionDimension aggCompactDim = AggregationCompactionDimension
-        .getAggregationCompactionDimension(attribute.getKey());
+    AggregationCompactionDimension aggCompactDim =
+        AggregationCompactionDimension.getAggregationCompactionDimension(
+            attribute.getKey());
     if (aggCompactDim != null) {
       Tag t = new Tag(aggCompactDim.getTagType(), attribute.getValue());
       return t;
@@ -475,7 +485,8 @@ public class TimelineStorageUtils {
 
   /**
    * Checks if passed object is of integral type(Short/Integer/Long).
-   * @param obj
+   *
+   * @param obj Object to be checked.
    * @return true if object passed is of type Short or Integer or Long, false
    * otherwise.
    */
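
The MILLIS_ONE_DAY constant and the getTopOfTheDayTimestamp javadoc above describe a simple rounding; here is a standalone sketch of that arithmetic (illustrative only, since the utility's actual implementation is not shown in this hunk).

public final class TopOfDaySketch {
  private static final long MILLIS_ONE_DAY = 86400000L;

  private TopOfDaySketch() {
  }

  // Drops the elapsed fraction of the day, leaving that day's 00:00:00 UTC.
  public static long topOfTheDay(long ts) {
    return ts - (ts % MILLIS_ONE_DAY);
  }

  public static void main(String[] args) {
    long ts = 1454948328000L;            // some time during 2016-02-08 (UTC)
    System.out.println(topOfTheDay(ts)); // midnight of that day, in millis
  }
}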

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimestampGenerator.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimestampGenerator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimestampGenerator.java
index 555b64e..7238efa 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimestampGenerator.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimestampGenerator.java
@@ -40,6 +40,8 @@ public class TimestampGenerator {
   /**
    * Returns the current wall clock time in milliseconds, multiplied by the
    * required precision.
+   *
+   * @return current timestamp.
    */
   public long currentTime() {
     // We want to align cell timestamps with current time.
@@ -58,6 +60,8 @@ public class TimestampGenerator {
    * sustained rate of more than 1M hbase writes per second AND if region fails
    * over within that time range of timestamps being generated then there may be
    * collisions writing to a cell version of the same column.
+   *
+   * @return unique timestamp.
    */
   public long getUniqueTimestamp() {
     long lastTs;
@@ -78,8 +82,8 @@ public class TimestampGenerator {
    * column at the same time, then say appId of 1001 will overlap with appId of
    * 001 and there may be collisions for that flow run's specific column.
    *
-   * @param incomingTS
-   * @param appId
+   * @param incomingTS Timestamp to be converted.
+   * @param appId Application Id.
    * @return a timestamp multiplied with TS_MULTIPLIER and last few digits of
    *         application id
    */
@@ -101,9 +105,9 @@ public class TimestampGenerator {
 
   /**
    * truncates the last few digits of the timestamp which were supplemented by
-   * the TimestampGenerator#getSupplementedTimestamp function
+   * the TimestampGenerator#getSupplementedTimestamp function.
    *
-   * @param incomingTS
+   * @param incomingTS Timestamp to be truncated.
    * @return a truncated timestamp value
    */
   public static long getTruncatedTimestamp(long incomingTS) {
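
A hedged sketch of the supplement-then-truncate idea documented above: multiply the cell timestamp by a constant and fold in the last digits of the application id so concurrent writers do not collide on a cell version. The multiplier below is an assumption for illustration, not necessarily the value TimestampGenerator uses.

public final class SupplementedTimestampSketch {
  private static final long MULTIPLIER = 1_000_000L; // assumed, see note above

  private SupplementedTimestampSketch() {
  }

  public static long supplement(long ts, long appIdSequence) {
    // Shift the timestamp left (in decimal) and append app-id digits.
    return ts * MULTIPLIER + (appIdSequence % MULTIPLIER);
  }

  public static long truncate(long supplementedTs) {
    // Inverse of supplement(): recover the original millisecond timestamp.
    return supplementedTs / MULTIPLIER;
  }

  public static void main(String[] args) {
    long supplemented = supplement(1454948328000L, 1001L);
    System.out.println(truncate(supplemented) == 1454948328000L); // true
  }
}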

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ValueConverter.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ValueConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ValueConverter.java
index 2388ba5..757a6d3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ValueConverter.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ValueConverter.java
@@ -28,18 +28,20 @@ public interface ValueConverter {
 
   /**
    * Encode an object as a byte array depending on the converter implementation.
-   * @param value
+   *
+   * @param value Value to be encoded.
    * @return a byte array
-   * @throws IOException
+   * @throws IOException if any problem is encountered while encoding.
    */
   byte[] encodeValue(Object value) throws IOException;
 
   /**
    * Decode a byte array and convert it into an object depending on the
    * converter implementation.
-   * @param bytes
+   *
+   * @param bytes Byte array to be decoded.
    * @return an object
-   * @throws IOException
+   * @throws IOException if any problem is encountered while decoding.
    */
   Object decodeValue(byte[] bytes) throws IOException;
 }

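The ValueConverter contract documented above is small enough to show with a toy implementation. Below is a sketch that stores numeric values as 8-byte longs via the HBase Bytes utility; the class name LongValueConverterSketch is made up for illustration, and the production code has its own converter implementations.

import java.io.IOException;

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.ValueConverter;

/** Minimal sketch of a ValueConverter that stores numeric values as longs. */
public class LongValueConverterSketch implements ValueConverter {

  @Override
  public byte[] encodeValue(Object value) throws IOException {
    if (!(value instanceof Number)) {
      throw new IOException("Expected a numeric value, got " + value);
    }
    return Bytes.toBytes(((Number) value).longValue());
  }

  @Override
  public Object decodeValue(byte[] bytes) throws IOException {
    if (bytes == null) {
      return null;
    }
    return Bytes.toLong(bytes);
  }
}
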
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumn.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumn.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumn.java
index deb8bd5..f47ba93 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumn.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumn.java
@@ -39,7 +39,7 @@ public enum EntityColumn implements Column<EntityTable> {
   ID(EntityColumnFamily.INFO, "id"),
 
   /**
-   * The type of entity
+   * The type of entity.
    */
   TYPE(EntityColumnFamily.INFO, "type"),
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnPrefix.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnPrefix.java
index 5b71228..f3c7e7f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnPrefix.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityColumnPrefix.java
@@ -54,7 +54,7 @@ public enum EntityColumnPrefix implements ColumnPrefix<EntityTable> {
   INFO(EntityColumnFamily.INFO, "i"),
 
   /**
-   * Lifecycle events for an entity
+   * Lifecycle events for an entity.
    */
   EVENT(EntityColumnFamily.INFO, "e"),
 
@@ -215,7 +215,8 @@ public enum EntityColumnPrefix implements ColumnPrefix<EntityTable> {
    *         is to facilitate returning byte arrays of values that were not
    *         Strings. If they can be treated as Strings, you should use
    *         {@link #readResults(Result)} instead.
-   * @throws IOException
+   * @throws IOException if there is any exception encountered while reading
+   *     result.
    */
   public Map<?, Object> readResultsHavingCompoundColumnQualifiers(Result result)
           throws IOException {
@@ -277,8 +278,8 @@ public enum EntityColumnPrefix implements ColumnPrefix<EntityTable> {
     for (EntityColumnPrefix ecp : EntityColumnPrefix.values()) {
       // Find a match based column family and on name.
       if (ecp.columnFamily.equals(columnFamily)
-          && (((columnPrefix == null) && (ecp.getColumnPrefix() == null)) || (ecp
-              .getColumnPrefix().equals(columnPrefix)))) {
+          && (((columnPrefix == null) && (ecp.getColumnPrefix() == null)) ||
+          (ecp.getColumnPrefix().equals(columnPrefix)))) {
         return ecp;
       }
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityRowKey.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityRowKey.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityRowKey.java
index 3eaa78c..04c633c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityRowKey.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityRowKey.java
@@ -74,14 +74,14 @@ public class EntityRowKey {
 
   /**
    * Constructs a row key prefix for the entity table as follows:
-   * {@code userName!clusterId!flowName!flowRunId!AppId}
+   * {@code userName!clusterId!flowName!flowRunId!AppId}.
    *
-   * @param clusterId
-   * @param userId
-   * @param flowName
-   * @param flowRunId
-   * @param appId
-   * @return byte array with the row key prefix
+   * @param clusterId Context cluster id.
+   * @param userId User name.
+   * @param flowName Flow name.
+   * @param flowRunId Run Id for the flow.
+   * @param appId Application Id.
+   * @return byte array with the row key prefix.
    */
   public static byte[] getRowKeyPrefix(String clusterId, String userId,
       String flowName, Long flowRunId, String appId) {
@@ -97,15 +97,17 @@ public class EntityRowKey {
 
   /**
    * Constructs a row key prefix for the entity table as follows:
-   * {@code userName!clusterId!flowName!flowRunId!AppId!entityType!}
+   * {@code userName!clusterId!flowName!flowRunId!AppId!entityType!}.
+   * Typically used while querying multiple entities of a particular entity
+   * type.
    *
-   * @param clusterId
-   * @param userId
-   * @param flowName
-   * @param flowRunId
-   * @param appId
-   * @param entityType
-   * @return byte array with the row key prefix
+   * @param clusterId Context cluster id.
+   * @param userId User name.
+   * @param flowName Flow name.
+   * @param flowRunId Run Id for the flow.
+   * @param appId Application Id.
+   * @param entityType Entity type.
+   * @return byte array with the row key prefix.
    */
   public static byte[] getRowKeyPrefix(String clusterId, String userId,
       String flowName, Long flowRunId, String appId, String entityType) {
@@ -123,16 +125,17 @@ public class EntityRowKey {
 
   /**
    * Constructs a row key for the entity table as follows:
-   * {@code userName!clusterId!flowName!flowRunId!AppId!entityType!entityId}
+   * {@code userName!clusterId!flowName!flowRunId!AppId!entityType!entityId}.
+   * Typically used while querying a specific entity.
    *
-   * @param clusterId
-   * @param userId
-   * @param flowName
-   * @param flowRunId
-   * @param appId
-   * @param entityType
-   * @param entityId
-   * @return byte array with the row key
+   * @param clusterId Context cluster id.
+   * @param userId User name.
+   * @param flowName Flow name.
+   * @param flowRunId Run Id for the flow.
+   * @param appId Application Id.
+   * @param entityType Entity type.
+   * @param entityId Entity Id.
+   * @return byte array with the row key.
    */
   public static byte[] getRowKey(String clusterId, String userId,
       String flowName, Long flowRunId, String appId, String entityType,
@@ -151,6 +154,9 @@ public class EntityRowKey {
 
   /**
    * Given the raw row key as bytes, returns the row key as an object.
+   *
+   * @param rowKey byte representation of row key.
+   * @return An <cite>EntityRowKey</cite> object.
    */
   public static EntityRowKey parseRowKey(byte[] rowKey) {
     byte[][] rowKeyComponents = Separator.QUALIFIERS.split(rowKey);

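The getRowKeyPrefix/getRowKey variants above only differ in how far down the userName!clusterId!flowName!flowRunId!AppId!entityType!entityId hierarchy they go. The following text-only sketch shows the layout; the real implementation builds the key bytes with Separator.QUALIFIERS and its own encodings rather than the plain '!'-joined string used here.

// Illustrative only: shows the documented layout
// userName!clusterId!flowName!flowRunId!AppId as text.
import java.nio.charset.StandardCharsets;

public final class EntityRowKeySketch {

  static byte[] rowKeyPrefix(String userId, String clusterId, String flowName,
      Long flowRunId, String appId) {
    String key = String.join("!", userId, clusterId, flowName,
        String.valueOf(flowRunId), appId) + "!";
    return key.getBytes(StandardCharsets.UTF_8);
  }

  public static void main(String[] args) {
    byte[] prefix = rowKeyPrefix("alice", "cluster1", "wordcount", 1L,
        "application_1449000000000_0001");
    // All entities written for this app sort together under this prefix,
    // which is what makes prefix-scan queries over the entity table possible.
    System.out.println(new String(prefix, StandardCharsets.UTF_8));
  }
}
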
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java
index d7d770b..3e3e3ab 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java
@@ -69,27 +69,27 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineHBas
  * </pre>
  */
 public class EntityTable extends BaseTable<EntityTable> {
-  /** entity prefix */
+  /** entity prefix. */
   private static final String PREFIX =
       YarnConfiguration.TIMELINE_SERVICE_PREFIX + "entity";
 
-  /** config param name that specifies the entity table name */
+  /** config param name that specifies the entity table name. */
   public static final String TABLE_NAME_CONF_NAME = PREFIX + ".table.name";
 
   /**
    * config param name that specifies the TTL for metrics column family in
-   * entity table
+   * entity table.
    */
   private static final String METRICS_TTL_CONF_NAME = PREFIX
       + ".table.metrics.ttl";
 
-  /** default value for entity table name */
+  /** default value for entity table name. */
   private static final String DEFAULT_TABLE_NAME = "timelineservice.entity";
 
-  /** default TTL is 30 days for metrics timeseries */
+  /** default TTL is 30 days for metrics timeseries. */
   private static final int DEFAULT_METRICS_TTL = 2592000;
 
-  /** default max number of versions */
+  /** default max number of versions. */
   private static final int DEFAULT_METRICS_MAX_VERSIONS = 1000;
 
   private static final Log LOG = LogFactory.getLog(EntityTable.class);
@@ -139,8 +139,8 @@ public class EntityTable extends BaseTable<EntityTable> {
     metricsCF.setMaxVersions(DEFAULT_METRICS_MAX_VERSIONS);
     metricsCF.setTimeToLive(hbaseConf.getInt(METRICS_TTL_CONF_NAME,
         DEFAULT_METRICS_TTL));
-    entityTableDescp
-        .setRegionSplitPolicyClassName("org.apache.hadoop.hbase.regionserver.KeyPrefixRegionSplitPolicy");
+    entityTableDescp.setRegionSplitPolicyClassName(
+        "org.apache.hadoop.hbase.regionserver.KeyPrefixRegionSplitPolicy");
     entityTableDescp.setValue("KeyPrefixRegionSplitPolicy.prefix_length",
         TimelineHBaseSchemaConstants.USERNAME_SPLIT_KEY_PREFIX_LENGTH);
     admin.createTable(entityTableDescp,

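The constants above translate into ordinary yarn-site style keys. The sketch below overrides them programmatically; the fully expanded key names assume YarnConfiguration.TIMELINE_SERVICE_PREFIX resolves to "yarn.timeline-service.", so verify them against your tree before relying on them.

import org.apache.hadoop.conf.Configuration;

public final class EntityTableConfigSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Corresponds to TABLE_NAME_CONF_NAME (default "timelineservice.entity").
    conf.set("yarn.timeline-service.entity.table.name",
        "prod.timelineservice.entity");
    // Corresponds to METRICS_TTL_CONF_NAME; the default is 2592000 (30 days).
    conf.setInt("yarn.timeline-service.entity.table.metrics.ttl",
        7 * 24 * 3600);
    System.out.println(conf.get("yarn.timeline-service.entity.table.name"));
  }
}
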
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java
index 26f1cc5..0e9578a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java
@@ -16,6 +16,10 @@
  * limitations under the License.
  */
 
+/**
+ * Package org.apache.hadoop.yarn.server.timelineservice.storage.entity
 * contains classes related to the implementation of the entity table.
+ */
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 package org.apache.hadoop.yarn.server.timelineservice.storage.entity;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationCompactionDimension.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationCompactionDimension.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationCompactionDimension.java
index ff12c7b..4e2cf2d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationCompactionDimension.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationCompactionDimension.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 public enum AggregationCompactionDimension {
 
   /**
-   * the application id
+   * the application id.
    */
   APPLICATION_ID((byte) 101);
 
@@ -50,8 +50,8 @@ public enum AggregationCompactionDimension {
     return this.inBytes.clone();
   }
 
-  public static AggregationCompactionDimension getAggregationCompactionDimension(
-      String aggCompactDimStr) {
+  public static AggregationCompactionDimension
+      getAggregationCompactionDimension(String aggCompactDimStr) {
     for (AggregationCompactionDimension aggDim : AggregationCompactionDimension
         .values()) {
       if (aggDim.name().equals(aggCompactDimStr)) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationOperation.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationOperation.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationOperation.java
index c635ce6..6240e81 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationOperation.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/AggregationOperation.java
@@ -36,17 +36,17 @@ public enum AggregationOperation {
   MAX((byte) 73),
 
   /**
-   * The metrics of the flow
+   * The metrics of the flow.
    */
   SUM((byte) 79),
 
   /**
-   * application running
+   * application running.
    */
   SUM_FINAL((byte) 83),
 
   /**
-   * compact
+   * compact.
    */
   COMPACT((byte) 89);
 
@@ -71,8 +71,8 @@ public enum AggregationOperation {
   }
 
   /**
-   * returns the AggregationOperation enum that represents that string
-   * @param aggOpStr
+   * returns the AggregationOperation enum that represents that string.
+   * @param aggOpStr Aggregation operation.
    * @return the AggregationOperation enum that represents that string
    */
   public static AggregationOperation getAggregationOperation(String aggOpStr) {

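getAggregationOperation() above is a name-based lookup rather than a plain Enum.valueOf() call. A stand-alone sketch of that contract with a stand-in enum follows; the byte tags are copied from the hunks above, the class Op is illustrative only, and the null fallback is an assumption since the tail of the real method is outside the hunk.

public final class AggregationLookupSketch {

  enum Op {
    MAX((byte) 73), SUM((byte) 79), SUM_FINAL((byte) 83), COMPACT((byte) 89);

    private final byte tag;

    Op(byte tag) {
      this.tag = tag;
    }

    byte getTag() {
      return tag;
    }

    /** Returns the Op whose name matches the string, or null if none does. */
    static Op fromString(String s) {
      for (Op op : values()) {
        if (op.name().equals(s)) {
          return op;
        }
      }
      return null;
    }
  }

  public static void main(String[] args) {
    System.out.println(Op.fromString("SUM_FINAL"));  // SUM_FINAL
    System.out.println(Op.fromString("UNKNOWN"));    // null, no exception
  }
}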

[3/4] hadoop git commit: YARN-4409. Fix javadoc and checkstyle issues in timelineservice code (Varun Saxena via sjlee)

Posted by sj...@apache.org.
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMActiveServiceContext.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMActiveServiceContext.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMActiveServiceContext.java
index fc05310..12daa95 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMActiveServiceContext.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMActiveServiceContext.java
@@ -388,15 +388,15 @@ public class RMActiveServiceContext {
   @Private
   @Unstable
   public void setRMTimelineCollectorManager(
-      RMTimelineCollectorManager timelineCollectorManager) {
-    this.timelineCollectorManager = timelineCollectorManager;
+      RMTimelineCollectorManager collectorManager) {
+    this.timelineCollectorManager = collectorManager;
   }
 
   @Private
   @Unstable
   public void setSystemMetricsPublisher(
-      SystemMetricsPublisher systemMetricsPublisher) {
-    this.systemMetricsPublisher = systemMetricsPublisher;
+      SystemMetricsPublisher metricsPublisher) {
+    this.systemMetricsPublisher = metricsPublisher;
   }
 
   @Private

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java
index e122ab4..4c72912 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java
@@ -379,8 +379,8 @@ public class RMContextImpl implements RMContext {
   
   @Override
   public void setSystemMetricsPublisher(
-      SystemMetricsPublisher systemMetricsPublisher) {
-    this.systemMetricsPublisher = systemMetricsPublisher;
+      SystemMetricsPublisher metricsPublisher) {
+    this.systemMetricsPublisher = metricsPublisher;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/AbstractSystemMetricsPublisher.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/AbstractSystemMetricsPublisher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/AbstractSystemMetricsPublisher.java
index a8c00a4..d4a4fc3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/AbstractSystemMetricsPublisher.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/AbstractSystemMetricsPublisher.java
@@ -30,6 +30,10 @@ import org.apache.hadoop.yarn.event.Dispatcher;
 import org.apache.hadoop.yarn.event.Event;
 import org.apache.hadoop.yarn.event.EventHandler;
 
+/**
+ * Abstract implementation of SystemMetricsPublisher which is then extended by
+ * metrics publisher implementations depending on timeline service version.
+ */
 public abstract class AbstractSystemMetricsPublisher extends CompositeService
     implements SystemMetricsPublisher {
   private MultiThreadedDispatcher dispatcher;
@@ -46,13 +50,18 @@ public abstract class AbstractSystemMetricsPublisher extends CompositeService
   protected void serviceInit(Configuration conf) throws Exception {
     dispatcher =
     new MultiThreadedDispatcher(getConfig().getInt(
-        YarnConfiguration.RM_SYSTEM_METRICS_PUBLISHER_DISPATCHER_POOL_SIZE,
-        YarnConfiguration.DEFAULT_RM_SYSTEM_METRICS_PUBLISHER_DISPATCHER_POOL_SIZE));
+        YarnConfiguration.
+        RM_SYSTEM_METRICS_PUBLISHER_DISPATCHER_POOL_SIZE,
+        YarnConfiguration.
+        DEFAULT_RM_SYSTEM_METRICS_PUBLISHER_DISPATCHER_POOL_SIZE));
     dispatcher.setDrainEventsOnStop();
     addIfService(dispatcher);
     super.serviceInit(conf);
   }
 
+  /**
+   * Dispatches ATS related events using multiple threads.
+   */
   @SuppressWarnings({ "rawtypes", "unchecked" })
   public static class MultiThreadedDispatcher extends CompositeService
       implements Dispatcher {
@@ -107,7 +116,7 @@ public abstract class AbstractSystemMetricsPublisher extends CompositeService
   }
 
   /**
-   * EventType which is used while publishing the events
+   * EventType which is used while publishing the events.
    */
   protected static enum SystemMetricsEventType {
     PUBLISH_ENTITY, PUBLISH_APPLICATION_FINISHED_ENTITY
@@ -158,9 +167,10 @@ public abstract class AbstractSystemMetricsPublisher extends CompositeService
         if (other.getType() != null) {
           return false;
         }
-      } else
+      } else {
         if (!appId.equals(other.appId) || !getType().equals(other.getType())) {
-        return false;
+          return false;
+        }
       }
       return true;
     }

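The serviceInit() change above is purely a line-wrap of the two YarnConfiguration constants that size the publisher's dispatcher pool. For reference, the same lookup in isolation, as a sketch that uses only the constants already named in the hunk:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

/** Reads the dispatcher pool size the same way the publisher does. */
public final class PublisherPoolSizeSketch {
  public static void main(String[] args) {
    Configuration conf = new YarnConfiguration();
    int poolSize = conf.getInt(
        YarnConfiguration.RM_SYSTEM_METRICS_PUBLISHER_DISPATCHER_POOL_SIZE,
        YarnConfiguration.DEFAULT_RM_SYSTEM_METRICS_PUBLISHER_DISPATCHER_POOL_SIZE);
    System.out.println("metrics publisher dispatcher pool size: " + poolSize);
  }
}
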
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/NoOpSystemMetricPublisher.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/NoOpSystemMetricPublisher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/NoOpSystemMetricPublisher.java
index 1810df1..c0c066a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/NoOpSystemMetricPublisher.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/NoOpSystemMetricPublisher.java
@@ -25,7 +25,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
 
 /**
  * This class does nothing when any of the methods are invoked on
- * SystemMetricsPublisher
+ * SystemMetricsPublisher.
  */
 public class NoOpSystemMetricPublisher implements SystemMetricsPublisher{
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/SystemMetricsPublisher.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/SystemMetricsPublisher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/SystemMetricsPublisher.java
index f895bba..8e8acbc 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/SystemMetricsPublisher.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/SystemMetricsPublisher.java
@@ -24,6 +24,9 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
 import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
 
+/**
+ * Interface used to publish app/container events to timelineservice.
+ */
 public interface SystemMetricsPublisher {
 
   void appCreated(RMApp app, long createdTime);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java
index ddc8a16..7dc5073 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java
@@ -43,6 +43,10 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptS
 import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
 import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
 
+/**
+ * This class is responsible for posting application, appattempt &amp; Container
+ * lifecycle related events to timeline service v1.
+ */
 public class TimelineServiceV1Publisher extends AbstractSystemMetricsPublisher {
 
   private static final Log LOG =
@@ -137,8 +141,8 @@ public class TimelineServiceV1Publisher extends AbstractSystemMetricsPublisher {
 
     entity.addEvent(tEvent);
     // sync sending of finish event to avoid possibility of saving application
-    // finished state in RMStateStore save without publishing in ATS
-    putEntity(entity);// sync event so that ATS update is done without fail
+    // finished state in RMStateStore save without publishing in ATS.
+    putEntity(entity); // sync event so that ATS update is done without fail.
   }
 
   @SuppressWarnings("unchecked")

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV2Publisher.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV2Publisher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV2Publisher.java
index 1b3aa6f..14073d1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV2Publisher.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV2Publisher.java
@@ -60,14 +60,14 @@ import com.google.common.annotations.VisibleForTesting;
 
 /**
  * This class is responsible for posting application, appattempt &amp; Container
- * lifecycle related events to timeline service V2
+ * lifecycle related events to timeline service v2.
  */
 @Private
 @Unstable
 public class TimelineServiceV2Publisher extends AbstractSystemMetricsPublisher {
   private static final Log LOG =
       LogFactory.getLog(TimelineServiceV2Publisher.class);
-  protected RMTimelineCollectorManager rmTimelineCollectorManager;
+  private RMTimelineCollectorManager rmTimelineCollectorManager;
   private boolean publishContainerMetrics;
 
   public TimelineServiceV2Publisher(RMContext rmContext) {
@@ -342,7 +342,8 @@ public class TimelineServiceV2Publisher extends AbstractSystemMetricsPublisher {
     }
   }
 
-  private static ContainerEntity createContainerEntity(ContainerId containerId) {
+  private static ContainerEntity createContainerEntity(
+      ContainerId containerId) {
     ContainerEntity entity = new ContainerEntity();
     entity.setId(containerId.toString());
     entity.setParent(new Identifier(TimelineEntityType.YARN_APPLICATION_ATTEMPT

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/package-info.java
new file mode 100644
index 0000000..a8a3804
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/package-info.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Package org.apache.hadoop.yarn.server.resourcemanager.metrics contains
+ * classes related to publishing app/container events to ATS.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+package org.apache.hadoop.yarn.server.resourcemanager.metrics;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppCollectorUpdateEvent.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppCollectorUpdateEvent.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppCollectorUpdateEvent.java
index 698c9b5..9642911 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppCollectorUpdateEvent.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppCollectorUpdateEvent.java
@@ -20,6 +20,9 @@ package org.apache.hadoop.yarn.server.resourcemanager.rmapp;
 
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 
+/**
+ * Event used for updating collector address in RMApp on node heartbeat.
+ */
 public class RMAppCollectorUpdateEvent extends RMAppEvent {
 
   private final String appCollectorAddr;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
index 1ee6247..62b13ee 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
@@ -97,7 +97,6 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAddedSchedulerEvent;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppRemovedSchedulerEvent;
 import org.apache.hadoop.yarn.server.timelineservice.collector.AppLevelTimelineCollector;
-import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollector;
 import org.apache.hadoop.yarn.server.utils.BuilderUtils;
 import org.apache.hadoop.yarn.server.webproxy.ProxyUriUtils;
 import org.apache.hadoop.yarn.state.InvalidStateTransitionException;
@@ -613,8 +612,8 @@ public class RMAppImpl implements RMApp, Recoverable {
   }
 
   @Override
-  public void setCollectorAddr(String collectorAddr) {
-    this.collectorAddr = collectorAddr;
+  public void setCollectorAddr(String collectorAddress) {
+    this.collectorAddr = collectorAddress;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/timelineservice/RMTimelineCollectorManager.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/timelineservice/RMTimelineCollectorManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/timelineservice/RMTimelineCollectorManager.java
index 116bf64..ff055a1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/timelineservice/RMTimelineCollectorManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/timelineservice/RMTimelineCollectorManager.java
@@ -28,7 +28,10 @@ import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollector
 import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorManager;
 import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
 
-
+/**
+ * This class extends TimelineCollectorManager to provide RM specific
+ * implementations.
+ */
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class RMTimelineCollectorManager extends TimelineCollectorManager {
@@ -44,8 +47,8 @@ public class RMTimelineCollectorManager extends TimelineCollectorManager {
     RMApp app = rmContext.getRMApps().get(appId);
     if (app == null) {
       throw new YarnRuntimeException(
-          "Unable to get the timeline collector context info for a non-existing app " +
-              appId);
+          "Unable to get the timeline collector context info for a " +
+          "non-existing app " + appId);
     }
     String userId = app.getUser();
     if (userId != null && !userId.isEmpty()) {
@@ -57,18 +60,18 @@ public class RMTimelineCollectorManager extends TimelineCollectorManager {
         continue;
       }
       switch (parts[0].toUpperCase()) {
-        case TimelineUtils.FLOW_NAME_TAG_PREFIX:
-          collector.getTimelineEntityContext().setFlowName(parts[1]);
-          break;
-        case TimelineUtils.FLOW_VERSION_TAG_PREFIX:
-          collector.getTimelineEntityContext().setFlowVersion(parts[1]);
-          break;
-        case TimelineUtils.FLOW_RUN_ID_TAG_PREFIX:
-          collector.getTimelineEntityContext().setFlowRunId(
-              Long.parseLong(parts[1]));
-          break;
-        default:
-          break;
+      case TimelineUtils.FLOW_NAME_TAG_PREFIX:
+        collector.getTimelineEntityContext().setFlowName(parts[1]);
+        break;
+      case TimelineUtils.FLOW_VERSION_TAG_PREFIX:
+        collector.getTimelineEntityContext().setFlowVersion(parts[1]);
+        break;
+      case TimelineUtils.FLOW_RUN_ID_TAG_PREFIX:
+        collector.getTimelineEntityContext().setFlowRunId(
+            Long.parseLong(parts[1]));
+        break;
+      default:
+        break;
       }
     }
   }

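The re-indented switch above consumes application tags of the form prefix:value. The sketch below shows that convention end to end; the exact prefix strings come from TimelineUtils, so the literals used here ("TIMELINE_FLOW_NAME_TAG" and friends) and the ':' delimiter are assumptions based on the parts[0]/parts[1] handling in the hunk.

import java.util.Arrays;
import java.util.List;

public final class FlowTagParsingSketch {
  public static void main(String[] args) {
    List<String> appTags = Arrays.asList(
        "TIMELINE_FLOW_NAME_TAG:distributed_grep",
        "TIMELINE_FLOW_VERSION_TAG:2.7",
        "TIMELINE_FLOW_RUN_ID_TAG:1449000000000");
    for (String tag : appTags) {
      String[] parts = tag.split(":", 2);
      if (parts.length != 2 || parts[1].isEmpty()) {
        continue;
      }
      switch (parts[0].toUpperCase()) {
      case "TIMELINE_FLOW_NAME_TAG":
        System.out.println("flow name    = " + parts[1]);
        break;
      case "TIMELINE_FLOW_VERSION_TAG":
        System.out.println("flow version = " + parts[1]);
        break;
      case "TIMELINE_FLOW_RUN_ID_TAG":
        System.out.println("flow run id  = " + Long.parseLong(parts[1]));
        break;
      default:
        break;
      }
    }
  }
}
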
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/timelineservice/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/timelineservice/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/timelineservice/package-info.java
new file mode 100644
index 0000000..c470011
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/timelineservice/package-info.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Package org.apache.hadoop.yarn.server.resourcemanager.timelineservice
+ * contains classes related to handling of app level collectors.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+package org.apache.hadoop.yarn.server.resourcemanager.timelineservice;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/AppLevelTimelineCollector.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/AppLevelTimelineCollector.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/AppLevelTimelineCollector.java
index b148491..36dd7b0 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/AppLevelTimelineCollector.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/AppLevelTimelineCollector.java
@@ -55,7 +55,8 @@ public class AppLevelTimelineCollector extends TimelineCollector {
     // Current user usually is not the app user, but keep this field non-null
     context.setUserId(UserGroupInformation.getCurrentUser().getShortUserName());
     // Use app ID to generate a default flow name for orphan app
-    context.setFlowName(TimelineUtils.generateDefaultFlowNameBasedOnAppId(appId));
+    context.setFlowName(
+        TimelineUtils.generateDefaultFlowNameBasedOnAppId(appId));
     // Set the flow version to string 1 if it's an orphan app
     context.setFlowVersion("1");
     // Set the flow run ID to 1 if it's an orphan app

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/NodeTimelineCollectorManager.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/NodeTimelineCollectorManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/NodeTimelineCollectorManager.java
index 0977ed9..785fb19 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/NodeTimelineCollectorManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/NodeTimelineCollectorManager.java
@@ -48,13 +48,17 @@ import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
 
 import com.google.common.annotations.VisibleForTesting;
 
+/**
+ * Class on the NodeManager side that manages adding and removing collectors and
+ * their lifecycle. Also instantiates the per-node collector webapp.
+ */
 @Private
 @Unstable
 public class NodeTimelineCollectorManager extends TimelineCollectorManager {
   private static final Log LOG =
       LogFactory.getLog(NodeTimelineCollectorManager.class);
 
-  // REST server for this collector manager
+  // REST server for this collector manager.
   private HttpServer2 timelineRestServer;
 
   private String timelineRestServerBindAddress;
@@ -97,7 +101,7 @@ public class NodeTimelineCollectorManager extends TimelineCollectorManager {
   }
 
   /**
-   * Launch the REST web server for this collector manager
+   * Launch the REST web server for this collector manager.
    */
   private void startWebApp() {
     Configuration conf = getConfig();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/PerNodeTimelineCollectorsAuxService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/PerNodeTimelineCollectorsAuxService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/PerNodeTimelineCollectorsAuxService.java
index b738530..041e7c2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/PerNodeTimelineCollectorsAuxService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/PerNodeTimelineCollectorsAuxService.java
@@ -108,6 +108,7 @@ public class PerNodeTimelineCollectorsAuxService extends AuxiliaryService {
    * The collector is also initialized and started. If the service already
    * exists, no new service is created.
    *
+   * @param appId Application Id to be added.
    * @return whether it was added successfully
    */
   public boolean addApplication(ApplicationId appId) {
@@ -122,6 +123,7 @@ public class PerNodeTimelineCollectorsAuxService extends AuxiliaryService {
    * collector is also stopped as a result. If the collector does not exist, no
    * change is made.
    *
+   * @param appId Application Id to be removed.
    * @return whether it was removed successfully
    */
   public boolean removeApplication(ApplicationId appId) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollector.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollector.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollector.java
index bb7db12..15187d1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollector.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollector.java
@@ -80,6 +80,8 @@ public abstract class TimelineCollector extends CompositeService {
    * @param entities entities to post
    * @param callerUgi the caller UGI
    * @return the response that contains the result of the post.
+   * @throws IOException if there is any exception encountered while putting
+   *     entities.
    */
   public TimelineWriteResponse putEntities(TimelineEntities entities,
       UserGroupInformation callerUgi) throws IOException {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorManager.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorManager.java
index 165754d..8f74ffb 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorManager.java
@@ -68,8 +68,10 @@ public class TimelineCollectorManager extends AbstractService {
     // basis
     writerFlusher = Executors.newSingleThreadScheduledExecutor();
     flushInterval = conf.getInt(
-        YarnConfiguration.TIMELINE_SERVICE_WRITER_FLUSH_INTERVAL_SECONDS,
-        YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WRITER_FLUSH_INTERVAL_SECONDS);
+        YarnConfiguration.
+        TIMELINE_SERVICE_WRITER_FLUSH_INTERVAL_SECONDS,
+        YarnConfiguration.
+        DEFAULT_TIMELINE_SERVICE_WRITER_FLUSH_INTERVAL_SECONDS);
     super.serviceInit(conf);
   }
 
@@ -102,6 +104,8 @@ public class TimelineCollectorManager extends AbstractService {
    * Put the collector into the collection if an collector mapped by id does
    * not exist.
    *
+   * @param appId Application Id for which collector needs to be put.
+   * @param collector timeline collector to be put.
    * @throws YarnRuntimeException if there  was any exception in initializing
    *                              and starting the app level service
    * @return the collector associated with id after the potential put.
@@ -140,6 +144,7 @@ public class TimelineCollectorManager extends AbstractService {
    * Removes the collector for the specified id. The collector is also stopped
    * as a result. If the collector does not exist, no change is made.
    *
+   * @param appId Application Id to remove.
    * @return whether it was removed successfully
    */
   public boolean remove(ApplicationId appId) {
@@ -162,6 +167,7 @@ public class TimelineCollectorManager extends AbstractService {
   /**
    * Returns the collector for the specified id.
    *
+   * @param appId Application Id for which we need to get the collector.
    * @return the collector or null if it does not exist
    */
   public TimelineCollector get(ApplicationId appId) {
@@ -171,6 +177,8 @@ public class TimelineCollectorManager extends AbstractService {
   /**
    * Returns whether the collector for the specified id exists in this
    * collection.
+   * @param appId Application Id.
+   * @return true if collector for the app id is found, false otherwise.
    */
   public boolean containsTimelineCollector(ApplicationId appId) {
     return collectors.containsKey(appId);

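The javadoc added above spells out a simple registry contract: put a collector if none is mapped for the app id, look it up, and remove (and stop) it when the app finishes. A minimal sketch of that contract with a plain ConcurrentHashMap in place of the real manager; the class and method names here are illustrative, and strings stand in for the real ApplicationId/TimelineCollector types.

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public final class CollectorRegistrySketch {
  private final ConcurrentMap<String, String> collectors =
      new ConcurrentHashMap<>();

  /** Returns the collector associated with the id after the potential put. */
  String putIfAbsent(String appId, String collector) {
    String existing = collectors.putIfAbsent(appId, collector);
    // In the real manager the new collector is initialized and started only
    // when it actually won the race and was added.
    return existing != null ? existing : collector;
  }

  /** The real manager also stops the removed collector. */
  boolean remove(String appId) {
    return collectors.remove(appId) != null;
  }

  String get(String appId) {
    return collectors.get(appId);
  }

  public static void main(String[] args) {
    CollectorRegistrySketch registry = new CollectorRegistrySketch();
    registry.putIfAbsent("application_1449000000000_0001", "collector-1");
    System.out.println(registry.get("application_1449000000000_0001"));
    System.out.println(registry.remove("application_1449000000000_0001"));
  }
}
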
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorWebService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorWebService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorWebService.java
index 8f595e2..2dff937 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorWebService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorWebService.java
@@ -74,6 +74,9 @@ public class TimelineCollectorWebService {
 
   private @Context ServletContext context;
 
+  /**
+   * Gives information about timeline collector.
+   */
   @XmlRootElement(name = "about")
   @XmlAccessorType(XmlAccessType.NONE)
   @Public
@@ -86,8 +89,8 @@ public class TimelineCollectorWebService {
 
     }
 
-    public AboutInfo(String about) {
-      this.about = about;
+    public AboutInfo(String abt) {
+      this.about = abt;
     }
 
     @XmlElement(name = "About")
@@ -95,14 +98,18 @@ public class TimelineCollectorWebService {
       return about;
     }
 
-    public void setAbout(String about) {
-      this.about = about;
+    public void setAbout(String abt) {
+      this.about = abt;
     }
 
   }
 
   /**
    * Return the description of the timeline web services.
+   *
+   * @param req Servlet request.
+   * @param res Servlet response.
+   * @return description of timeline web service.
    */
   @GET
   @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
@@ -117,6 +124,15 @@ public class TimelineCollectorWebService {
    * Accepts writes to the collector, and returns a response. It simply routes
    * the request to the app level collector. It expects an application as a
    * context.
+   *
+   * @param req Servlet request.
+   * @param res Servlet response.
+   * @param async flag indicating whether this is an async put or not. "true"
+   *     indicates an async call. If null, it is considered false.
+   * @param appId Application Id to which the entities to be put belong. If
+   *     appId is missing or cannot be parsed, HTTP 400 will be sent back.
+   * @param entities timeline entities to be put.
+   * @return a Response with appropriate HTTP status.
    */
   @PUT
   @Path("/entities")
@@ -202,29 +218,29 @@ public class TimelineCollectorWebService {
       }
       if (type != null) {
         switch (type) {
-          case YARN_CLUSTER:
-            entitiesToReturn.addEntity(new ClusterEntity(entity));
-            break;
-          case YARN_FLOW_RUN:
-            entitiesToReturn.addEntity(new FlowRunEntity(entity));
-            break;
-          case YARN_APPLICATION:
-            entitiesToReturn.addEntity(new ApplicationEntity(entity));
-            break;
-          case YARN_APPLICATION_ATTEMPT:
-            entitiesToReturn.addEntity(new ApplicationAttemptEntity(entity));
-            break;
-          case YARN_CONTAINER:
-            entitiesToReturn.addEntity(new ContainerEntity(entity));
-            break;
-          case YARN_QUEUE:
-            entitiesToReturn.addEntity(new QueueEntity(entity));
-            break;
-          case YARN_USER:
-            entitiesToReturn.addEntity(new UserEntity(entity));
-            break;
-          default:
-            break;
+        case YARN_CLUSTER:
+          entitiesToReturn.addEntity(new ClusterEntity(entity));
+          break;
+        case YARN_FLOW_RUN:
+          entitiesToReturn.addEntity(new FlowRunEntity(entity));
+          break;
+        case YARN_APPLICATION:
+          entitiesToReturn.addEntity(new ApplicationEntity(entity));
+          break;
+        case YARN_APPLICATION_ATTEMPT:
+          entitiesToReturn.addEntity(new ApplicationAttemptEntity(entity));
+          break;
+        case YARN_CONTAINER:
+          entitiesToReturn.addEntity(new ContainerEntity(entity));
+          break;
+        case YARN_QUEUE:
+          entitiesToReturn.addEntity(new QueueEntity(entity));
+          break;
+        case YARN_USER:
+          entitiesToReturn.addEntity(new UserEntity(entity));
+          break;
+        default:
+          break;
         }
       } else {
         entitiesToReturn.addEntity(entity);

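The new javadoc above documents the PUT handler at @Path("/entities"), which routes incoming entities to the app-level collector. As a rough sketch of what a caller looks like, assuming a collector reachable on localhost and a query parameter named "appid" (the address, path prefix, application id and JSON field names below are illustrative assumptions, not taken from this patch), a plain JDK client could be:

  import java.io.OutputStream;
  import java.net.HttpURLConnection;
  import java.net.URL;
  import java.nio.charset.StandardCharsets;

  public class PutEntitiesSketch {
    public static void main(String[] args) throws Exception {
      // Hypothetical collector address, path prefix and query parameter name.
      URL url = new URL("http://localhost:8188/ws/v2/timeline/entities"
          + "?appid=application_1454975529976_0001");
      // Minimal JSON body; field names here are illustrative only.
      String json =
          "{\"entities\":[{\"entity\":\"entity_1\",\"entitytype\":\"TEST\"}]}";
      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
      conn.setRequestMethod("PUT");
      conn.setDoOutput(true);
      conn.setRequestProperty("Content-Type", "application/json");
      try (OutputStream out = conn.getOutputStream()) {
        out.write(json.getBytes(StandardCharsets.UTF_8));
      }
      // Per the javadoc above, a missing or unparseable app id yields HTTP 400.
      System.out.println("HTTP status: " + conn.getResponseCode());
    }
  }
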
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/package-info.java
new file mode 100644
index 0000000..1f7dd23
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/package-info.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Package org.apache.hadoop.yarn.server.timelineservice.collector contains
+ * classes which can be used across the collector. It does not contain
+ * classes related to storage implementations.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+package org.apache.hadoop.yarn.server.timelineservice.collector;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderManager.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderManager.java
index 84a97ea..4cff3bc 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderManager.java
@@ -34,6 +34,11 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader;
 
 import com.google.common.annotations.VisibleForTesting;
 
+/**
+ * This class wraps the timeline reader store implementation. It does some
+ * non-trivial manipulation of the timeline data before or after getting
+ * it from the backend store.
+ */
 @Private
 @Unstable
 public class TimelineReaderManager extends AbstractService {
@@ -114,9 +119,19 @@ public class TimelineReaderManager extends AbstractService {
   }
 
   /**
-   * Get a set of entities matching given predicates. The meaning of each
-   * argument has been documented with {@link TimelineReader#getEntities}.
+   * Get a set of entities matching given predicates by making a call to
+   * backend storage implementation. The meaning of each argument has been
+   * documented in detail with {@link TimelineReader#getEntities}. If cluster
+   * ID has not been supplied by the client, fills the cluster ID from config
+   * before making a call to backend storage. After fetching entities from
+   * backend, fills the appropriate UID based on entity type for each entity.
    *
+   * @param context Timeline context within the scope of which entities have to
+   *     be fetched.
+   * @param filters Filters which limit the number of entities to be returned.
+   * @param dataToRetrieve Data to carry in each entity fetched.
+   * @return a set of <cite>TimelineEntity</cite> objects.
+   * @throws IOException if any problem occurs while getting entities.
    * @see TimelineReader#getEntities
    */
   public Set<TimelineEntity> getEntities(TimelineReaderContext context,
@@ -135,9 +150,18 @@ public class TimelineReaderManager extends AbstractService {
   }
 
   /**
-   * Get single timeline entity. The meaning of each argument has been
-   * documented with {@link TimelineReader#getEntity}.
+   * Get single timeline entity by making a call to backend storage
+   * implementation. The meaning of each argument has been documented in
+   * detail with {@link TimelineReader#getEntity}. If cluster ID has not been
+   * supplied by the client, fills the cluster ID from config before making a
+   * call to backend storage. After fetching entity from backend, fills the
+   * appropriate UID based on entity type.
    *
+   * @param context Timeline context within the scope of which entity has to be
+   *     fetched.
+   * @param dataToRetrieve Data to carry in the entity fetched.
+   * @return A <cite>TimelineEntity</cite> object if found, null otherwise.
+   * @throws IOException  if any problem occurs while getting entity.
    * @see TimelineReader#getEntity
    */
   public TimelineEntity getEntity(TimelineReaderContext context,

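Both getEntities and getEntity above fill in the cluster ID from configuration when the client omits it. A minimal sketch of that defaulting step, assuming the standard yarn.resourcemanager.cluster-id key (the key name and the fallback value are assumptions made for this sketch):

  import org.apache.hadoop.conf.Configuration;

  public class ClusterIdDefaulting {
    // The key mirrors YARN's resourcemanager cluster-id setting; treat the key
    // and the fallback value as assumptions made for this sketch.
    static String resolveClusterId(String requestedClusterId,
        Configuration conf) {
      if (requestedClusterId != null && !requestedClusterId.isEmpty()) {
        return requestedClusterId;
      }
      return conf.get("yarn.resourcemanager.cluster-id", "yarn-cluster");
    }

    public static void main(String[] args) {
      System.out.println(resolveClusterId(null, new Configuration()));
    }
  }
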
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderServer.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderServer.java
index 6b4213d..97725e6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderServer.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderServer.java
@@ -49,7 +49,7 @@ import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
 
 import com.google.common.annotations.VisibleForTesting;
 
-/** Main class for Timeline Reader */
+/** Main class for Timeline Reader. */
 @Private
 @Unstable
 public class TimelineReaderServer extends CompositeService {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
index d68199a..0b9549b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
@@ -73,24 +73,25 @@ public class TimelineReaderWebServices {
   private static final String DATE_PATTERN = "yyyyMMdd";
 
   @VisibleForTesting
-  static ThreadLocal<DateFormat> DATE_FORMAT = new ThreadLocal<DateFormat>() {
-    @Override
-    protected DateFormat initialValue() {
-      SimpleDateFormat format =
-          new SimpleDateFormat(DATE_PATTERN, Locale.ENGLISH);
-      format.setTimeZone(TimeZone.getTimeZone("GMT"));
-      format.setLenient(false);
-      return format;
-    }
-  };
+  static final ThreadLocal<DateFormat> DATE_FORMAT =
+      new ThreadLocal<DateFormat>() {
+      @Override
+      protected DateFormat initialValue() {
+        SimpleDateFormat format =
+            new SimpleDateFormat(DATE_PATTERN, Locale.ENGLISH);
+        format.setTimeZone(TimeZone.getTimeZone("GMT"));
+        format.setLenient(false);
+        return format;
+      }
+    };
 
   private void init(HttpServletResponse response) {
     response.setContentType(null);
   }
 
-  private static class DateRange {
-    Long dateStart;
-    Long dateEnd;
+  private static final class DateRange {
+    private Long dateStart;
+    private Long dateEnd;
     private DateRange(Long start, Long end) {
       this.dateStart = start;
       this.dateEnd = end;
@@ -212,6 +213,7 @@ public class TimelineReaderWebServices {
    * @param uId a delimited string containing clusterid, userid, flow name,
    *     flowrun id and app id which are extracted from UID and then used to
    *     query backend(Mandatory path param).
+   * @param entityType Type of entities(Mandatory path param).
    * @param limit Number of entities to return(Optional query param).
    * @param createdTimeStart If specified, matched entities should not be
    *     created before this timestamp(Optional query param).

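The DATE_FORMAT change above keeps one SimpleDateFormat per thread because SimpleDateFormat is not thread-safe; making the field final only silences the checkstyle warning, the ThreadLocal pattern itself is unchanged. A self-contained sketch of the same pattern:

  import java.text.DateFormat;
  import java.text.ParseException;
  import java.text.SimpleDateFormat;
  import java.util.Locale;
  import java.util.TimeZone;

  public class PerThreadDateFormat {
    private static final ThreadLocal<DateFormat> FORMAT =
        new ThreadLocal<DateFormat>() {
          @Override
          protected DateFormat initialValue() {
            SimpleDateFormat f =
                new SimpleDateFormat("yyyyMMdd", Locale.ENGLISH);
            f.setTimeZone(TimeZone.getTimeZone("GMT"));
            f.setLenient(false);   // reject out-of-range dates like 20159999
            return f;
          }
        };

    // Each thread reuses its own DateFormat, so no external locking is needed.
    static long parseDay(String day) throws ParseException {
      return FORMAT.get().parse(day).getTime();
    }

    public static void main(String[] args) throws ParseException {
      System.out.println(parseDay("20160208"));
    }
  }
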
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
index fc07e51..d12f7e5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
@@ -44,15 +44,15 @@ final class TimelineReaderWebServicesUtils {
   /**
    * Parse the passed context information represented as strings and convert
    * into a {@link TimelineReaderContext} object.
-   * @param clusterId
-   * @param userId
-   * @param flowName
-   * @param flowRunId
-   * @param appId
-   * @param entityType
-   * @param entityId
+   * @param clusterId Cluster Id.
+   * @param userId User Id.
+   * @param flowName Flow Name.
+   * @param flowRunId Run id for the flow.
+   * @param appId App Id.
+   * @param entityType Entity Type.
+   * @param entityId Entity Id.
    * @return a {@link TimelineReaderContext} object.
-   * @throws Exception
+   * @throws Exception if any problem occurs during parsing.
    */
   static TimelineReaderContext createTimelineReaderContext(String clusterId,
       String userId, String flowName, String flowRunId, String appId,
@@ -65,17 +65,17 @@ final class TimelineReaderWebServicesUtils {
   /**
    * Parse the passed filters represented as strings and convert them into a
    * {@link TimelineEntityFilters} object.
-   * @param limit
-   * @param createdTimeStart
-   * @param createdTimeEnd
-   * @param relatesTo
-   * @param isRelatedTo
-   * @param infofilters
-   * @param conffilters
-   * @param metricfilters
-   * @param eventfilters
+   * @param limit Limit to number of entities to return.
+   * @param createdTimeStart Created time start for the entities to return.
+   * @param createdTimeEnd Created time end for the entities to return.
+   * @param relatesTo Entities to return must match relatesTo.
+   * @param isRelatedTo Entities to return must match isRelatedTo.
+   * @param infofilters Entities to return must match these info filters.
+   * @param conffilters Entities to return must match these config filters.
+   * @param metricfilters Entities to return must match these metric filters.
+   * @param eventfilters Entities to return must match these event filters.
    * @return a {@link TimelineEntityFilters} object.
-   * @throws Exception
+   * @throws Exception if any problem occurs during parsing.
    */
   static TimelineEntityFilters createTimelineEntityFilters(String limit,
       String createdTimeStart, String createdTimeEnd, String relatesTo,
@@ -94,11 +94,11 @@ final class TimelineReaderWebServicesUtils {
   /**
    * Parse the passed fields represented as strings and convert them into a
    * {@link TimelineDataToRetrieve} object.
-   * @param confs
-   * @param metrics
-   * @param fields
+   * @param confs confs to retrieve.
+   * @param metrics metrics to retrieve.
+   * @param fields fields to retrieve.
    * @return a {@link TimelineDataToRetrieve} object.
-   * @throws Exception
+   * @throws Exception if any problem occurs during parsing.
    */
   static TimelineDataToRetrieve createTimelineDataToRetrieve(String confs,
       String metrics, String fields) throws Exception {
@@ -192,7 +192,7 @@ final class TimelineReaderWebServicesUtils {
    * should be represented as "key1:value1,key2:value2,key3:value3".
    * @param str delimited string represented as key-value pairs.
    * @param pairsDelim key-value pairs are delimited by this delimiter.
-   * @param keyValuesDelim key and value are delimited by this delimiter.
+   * @param keyValDelim key and value are delimited by this delimiter.
    * @return a map of key-value pairs with both key and value being strings.
    */
   static Map<String, String> parseKeyStrValueStr(String str,
@@ -212,8 +212,8 @@ final class TimelineReaderWebServicesUtils {
    * should be represented as "key1:value1,key2:value2,key3:value3".
    * @param str delimited string represented as key-value pairs.
    * @param pairsDelim key-value pairs are delimited by this delimiter.
-   * @param keyValuesDelim key and value are delimited by this delimiter.
-   * @return a map of key-value pairs with key being a string and value amy
+   * @param keyValDelim key and value are delimited by this delimiter.
+   * @return a map of key-value pairs with key being a string and value, any
    *     object.
    */
   static Map<String, Object> parseKeyStrValueObj(String str,

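The parseKeyStrValueStr/parseKeyStrValueObj javadoc above describes splitting a delimited string such as "key1:value1,key2:value2,key3:value3" into a map. A stripped-down sketch of that kind of parsing, ignoring the escaping the real utility class handles:

  import java.util.HashMap;
  import java.util.Map;

  public class KeyValueStringParser {
    // Splits "key1:value1,key2:value2" style strings; unlike the real utility,
    // this sketch does not handle escaped delimiters.
    static Map<String, String> parse(String str, String pairsDelim,
        String keyValDelim) {
      Map<String, String> result = new HashMap<>();
      if (str == null || str.isEmpty()) {
        return result;
      }
      for (String pair : str.split(pairsDelim)) {
        String[] kv = pair.split(keyValDelim, 2);
        if (kv.length == 2) {
          result.put(kv[0].trim(), kv[1].trim());
        }
      }
      return result;
    }

    public static void main(String[] args) {
      System.out.println(
          parse("key1:value1,key2:value2,key3:value3", ",", ":"));
    }
  }
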
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineUIDConverter.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineUIDConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineUIDConverter.java
index cac3f06..08e5405 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineUIDConverter.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineUIDConverter.java
@@ -204,7 +204,7 @@ enum TimelineUIDConverter {
 
   /**
    * Split UID using {@link #UID_DELIMITER_CHAR} and {@link #UID_ESCAPE_CHAR}.
-   * @param uid
+   * @param uid UID to be split.
    * @return a list of different parts of UID split across delimiter.
    * @throws IllegalArgumentException if UID is not properly escaped.
    */
@@ -229,17 +229,19 @@ enum TimelineUIDConverter {
 
   /**
    * Encodes UID depending on UID implementation.
-   * @param context
+   *
+   * @param context Reader context.
    * @return UID represented as a string.
    */
   abstract String encodeUID(TimelineReaderContext context);
 
   /**
    * Decodes UID depending on UID implementation.
-   * @param uId
+   *
+   * @param uId UID to be decoded.
    * @return a {@link TimelineReaderContext} object if UID passed can be
    * decoded, null otherwise.
-   * @throws Exception
+   * @throws Exception if any problem occurs while decoding.
    */
   abstract TimelineReaderContext decodeUID(String uId) throws Exception;
 }

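splitUID above breaks a UID apart on UID_DELIMITER_CHAR while honouring UID_ESCAPE_CHAR, so that delimiters inside cluster, user or flow names survive the round trip. A standalone sketch of an escaped split in that spirit (the '!' delimiter and '*' escape below are assumptions chosen for the example):

  import java.util.ArrayList;
  import java.util.List;

  public class EscapedUidSplitter {
    static List<String> split(String uid, char delimiter, char escape) {
      List<String> parts = new ArrayList<>();
      StringBuilder current = new StringBuilder();
      for (int i = 0; i < uid.length(); i++) {
        char c = uid.charAt(i);
        if (c == escape && i + 1 < uid.length()) {
          current.append(uid.charAt(++i));   // keep the escaped char literally
        } else if (c == delimiter) {
          parts.add(current.toString());
          current.setLength(0);
        } else {
          current.append(c);
        }
      }
      parts.add(current.toString());
      return parts;
    }

    public static void main(String[] args) {
      // "cluster!user!flow*!name" -> [cluster, user, flow!name]
      System.out.println(split("cluster!user!flow*!name", '!', '*'));
    }
  }
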
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterUtils.java
index da3c383..f902500 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilterUtils.java
@@ -93,9 +93,11 @@ public final class TimelineFilterUtils {
    * Creates equivalent HBase {@link FilterList} from {@link TimelineFilterList}
    * while converting different timeline filters(of type {@link TimelineFilter})
    * into their equivalent HBase filters.
-   * @param colPrefix
-   * @param filterList
-   * @return a {@link FilterList} object
+   *
+   * @param <T> Describes the type of column prefix.
+   * @param colPrefix column prefix which will be used for conversion.
+   * @param filterList timeline filter list which has to be converted.
+   * @return A {@link FilterList} object.
    */
   public static <T> FilterList createHBaseFilterList(ColumnPrefix<T> colPrefix,
       TimelineFilterList filterList) {

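createHBaseFilterList above converts a TimelineFilterList into an equivalent HBase FilterList. Independent of the timeline filter types, composing HBase filters looks roughly like this sketch (the qualifier prefixes are invented for the example):

  import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
  import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
  import org.apache.hadoop.hbase.filter.FilterList;
  import org.apache.hadoop.hbase.filter.FilterList.Operator;
  import org.apache.hadoop.hbase.filter.QualifierFilter;
  import org.apache.hadoop.hbase.util.Bytes;

  public class HBaseFilterListSketch {
    public static void main(String[] args) {
      // Keep columns whose qualifier starts with either prefix; the prefixes
      // themselves are invented for this sketch.
      FilterList either = new FilterList(Operator.MUST_PASS_ONE);
      either.addFilter(new QualifierFilter(CompareOp.EQUAL,
          new BinaryPrefixComparator(Bytes.toBytes("e!started"))));
      either.addFilter(new QualifierFilter(CompareOp.EQUAL,
          new BinaryPrefixComparator(Bytes.toBytes("e!finished"))));

      // A top-level MUST_PASS_ALL list can combine this with further filters.
      FilterList all = new FilterList(Operator.MUST_PASS_ALL);
      all.addFilter(either);
      System.out.println(all);
    }
  }
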
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/package-info.java
index 51247bd..116509a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/package-info.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/package-info.java
@@ -15,6 +15,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
+/**
+ * Package org.apache.hadoop.yarn.server.timelineservice.reader contains
+ * classes which can be used across the reader. It does not contain classes
+ * related to storage implementations.
+ */
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 package org.apache.hadoop.yarn.server.timelineservice.reader;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java
index aa0071f..97e05dd 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java
@@ -105,15 +105,16 @@ public class FileSystemTimelineReaderImpl extends AbstractService
 
   /**
    * Deserialize a POJO object from a JSON string.
-   * @param clazz
-   *      class to be desirialized
    *
-   * @param jsonString
-   *    json string to deserialize
-   * @return TimelineEntity object
-   * @throws IOException
-   * @throws JsonMappingException
-   * @throws JsonGenerationException
+   * @param <T> Describes the type of class to be returned.
+   * @param clazz class to be deserialized.
+   * @param jsonString JSON string to deserialize.
+   * @return An object based on class type. Used typically for
+   *     <cite>TimelineEntity</cite> object.
+   * @throws IOException if the underlying input source has problems during
+   *     parsing.
+   * @throws JsonMappingException  if parser has problems parsing content.
+   * @throws JsonGenerationException if there is a problem in JSON writing.
    */
   public static <T> T getTimelineRecordFromJSON(
       String jsonString, Class<T> clazz)
@@ -128,33 +129,32 @@ public class FileSystemTimelineReaderImpl extends AbstractService
     }
     for (Field field : fields) {
       switch(field) {
-        case CONFIGS:
-          finalEntity.setConfigs(real.getConfigs());
-          break;
-        case METRICS:
-          finalEntity.setMetrics(real.getMetrics());
-          break;
-        case INFO:
-          finalEntity.setInfo(real.getInfo());
-          break;
-        case IS_RELATED_TO:
-          finalEntity.setIsRelatedToEntities(real.getIsRelatedToEntities());
-          break;
-        case RELATES_TO:
-          finalEntity.setIsRelatedToEntities(real.getIsRelatedToEntities());
-          break;
-        case EVENTS:
-          finalEntity.setEvents(real.getEvents());
-          break;
-        default:
-          continue;
+      case CONFIGS:
+        finalEntity.setConfigs(real.getConfigs());
+        break;
+      case METRICS:
+        finalEntity.setMetrics(real.getMetrics());
+        break;
+      case INFO:
+        finalEntity.setInfo(real.getInfo());
+        break;
+      case IS_RELATED_TO:
+        finalEntity.setIsRelatedToEntities(real.getIsRelatedToEntities());
+        break;
+      case RELATES_TO:
+        finalEntity.setIsRelatedToEntities(real.getIsRelatedToEntities());
+        break;
+      case EVENTS:
+        finalEntity.setEvents(real.getEvents());
+        break;
+      default:
+        continue;
       }
     }
   }
 
-  private String getFlowRunPath(String userId, String clusterId, String flowName,
-      Long flowRunId, String appId)
-      throws IOException {
+  private String getFlowRunPath(String userId, String clusterId,
+      String flowName, Long flowRunId, String appId) throws IOException {
     if (userId != null && flowName != null && flowRunId != null) {
       return userId + "/" + flowName + "/" + flowRunId;
     }
@@ -272,11 +272,11 @@ public class FileSystemTimelineReaderImpl extends AbstractService
     Map<Long, Set<TimelineEntity>> sortedEntities =
         new TreeMap<>(
             new Comparator<Long>() {
-              @Override
-              public int compare(Long l1, Long l2) {
-                return l2.compareTo(l1);
-              }
+            @Override
+            public int compare(Long l1, Long l2) {
+              return l2.compareTo(l1);
             }
+          }
         );
     for (File entityFile : dir.listFiles()) {
       if (!entityFile.getName().contains(TIMELINE_SERVICE_STORAGE_EXTENSION)) {

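getTimelineRecordFromJSON above is a thin wrapper around Jackson data binding: hand readValue a JSON string and a target class and get a typed object back. A minimal, self-contained sketch with a made-up POJO (the com.fasterxml Jackson flavour is used here purely for illustration):

  import com.fasterxml.jackson.databind.ObjectMapper;

  public class JsonToPojoSketch {
    // Simple POJO standing in for a timeline record; Jackson binds public
    // fields by name.
    public static class Record {
      public String entity;
      public String entitytype;
    }

    public static void main(String[] args) throws Exception {
      String json = "{\"entity\":\"entity_1\",\"entitytype\":\"TEST\"}";
      ObjectMapper mapper = new ObjectMapper();
      // readValue takes the JSON string and the target class and returns a
      // typed instance, which is what the helper above wraps.
      Record r = mapper.readValue(json, Record.class);
      System.out.println(r.entity + " / " + r.entitytype);
    }
  }

As a side note on the reindented comparator in the same file, the reverse-ordered TreeMap could equally be built with new TreeMap<>(Collections.reverseOrder()); the anonymous Comparator keeps the existing behaviour unchanged.
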
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineWriterImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineWriterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineWriterImpl.java
index 4385bbc..74a03ac 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineWriterImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineWriterImpl.java
@@ -76,9 +76,10 @@ public class FileSystemTimelineWriterImpl extends AbstractService
     return response;
   }
 
-  private synchronized void write(String clusterId, String userId, String flowName,
-      String flowVersion, long flowRun, String appId, TimelineEntity entity,
-      TimelineWriteResponse response) throws IOException {
+  private synchronized void write(String clusterId, String userId,
+      String flowName, String flowVersion, long flowRun, String appId,
+      TimelineEntity entity, TimelineWriteResponse response)
+      throws IOException {
     PrintWriter out = null;
     try {
       String dir = mkdirs(outputRoot, ENTITIES_DIR, clusterId, userId,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineReaderImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineReaderImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineReaderImpl.java
index 9f8257f..a384a84 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineReaderImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineReaderImpl.java
@@ -35,6 +35,9 @@ import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContex
 import org.apache.hadoop.yarn.server.timelineservice.storage.reader.TimelineEntityReader;
 import org.apache.hadoop.yarn.server.timelineservice.storage.reader.TimelineEntityReaderFactory;
 
+/**
+ * HBase based implementation for {@link TimelineReader}.
+ */
 public class HBaseTimelineReaderImpl
     extends AbstractService implements TimelineReader {
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java
index 2a82ccb..997b175 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java
@@ -93,7 +93,7 @@ public class HBaseTimelineWriterImpl extends AbstractService implements
   }
 
   /**
-   * initializes the hbase connection to write to the entity table
+   * initializes the hbase connection to write to the entity table.
    */
   @Override
   protected void serviceInit(Configuration conf) throws Exception {
@@ -104,7 +104,8 @@ public class HBaseTimelineWriterImpl extends AbstractService implements
     appToFlowTable = new AppToFlowTable().getTableMutator(hbaseConf, conn);
     applicationTable = new ApplicationTable().getTableMutator(hbaseConf, conn);
     flowRunTable = new FlowRunTable().getTableMutator(hbaseConf, conn);
-    flowActivityTable = new FlowActivityTable().getTableMutator(hbaseConf, conn);
+    flowActivityTable =
+        new FlowActivityTable().getTableMutator(hbaseConf, conn);
   }
 
   /**
@@ -289,7 +290,7 @@ public class HBaseTimelineWriterImpl extends AbstractService implements
   }
 
   /**
-   * Stores the Relations from the {@linkplain TimelineEntity} object
+   * Stores the Relations from the {@linkplain TimelineEntity} object.
    */
   private <T> void storeRelations(byte[] rowKey,
       Map<String, Set<String>> connectedEntities,
@@ -306,7 +307,7 @@ public class HBaseTimelineWriterImpl extends AbstractService implements
   }
 
   /**
-   * Stores information from the {@linkplain TimelineEntity} object
+   * Stores information from the {@linkplain TimelineEntity} object.
    */
   private void storeInfo(byte[] rowKey, TimelineEntity te, String flowVersion,
       boolean isApplication) throws IOException {
@@ -341,7 +342,7 @@ public class HBaseTimelineWriterImpl extends AbstractService implements
   }
 
   /**
-   * stores the config information from {@linkplain TimelineEntity}
+   * stores the config information from {@linkplain TimelineEntity}.
    */
   private void storeConfig(byte[] rowKey, Map<String, String> config,
       boolean isApplication) throws IOException {
@@ -351,17 +352,17 @@ public class HBaseTimelineWriterImpl extends AbstractService implements
     for (Map.Entry<String, String> entry : config.entrySet()) {
       if (isApplication) {
         ApplicationColumnPrefix.CONFIG.store(rowKey, applicationTable,
-          entry.getKey(), null, entry.getValue());
+            entry.getKey(), null, entry.getValue());
       } else {
         EntityColumnPrefix.CONFIG.store(rowKey, entityTable, entry.getKey(),
-          null, entry.getValue());
+            null, entry.getValue());
       }
     }
   }
 
   /**
    * stores the {@linkplain TimelineMetric} information from the
-   * {@linkplain TimelineEvent} object
+   * {@linkplain TimelineEvent} object.
    */
   private void storeMetrics(byte[] rowKey, Set<TimelineMetric> metrics,
       boolean isApplication) throws IOException {
@@ -373,10 +374,10 @@ public class HBaseTimelineWriterImpl extends AbstractService implements
           Long timestamp = timeseriesEntry.getKey();
           if (isApplication) {
             ApplicationColumnPrefix.METRIC.store(rowKey, applicationTable,
-              metricColumnQualifier, timestamp, timeseriesEntry.getValue());
+                metricColumnQualifier, timestamp, timeseriesEntry.getValue());
           } else {
             EntityColumnPrefix.METRIC.store(rowKey, entityTable,
-              metricColumnQualifier, timestamp, timeseriesEntry.getValue());
+                metricColumnQualifier, timestamp, timeseriesEntry.getValue());
           }
         }
       }
@@ -384,7 +385,7 @@ public class HBaseTimelineWriterImpl extends AbstractService implements
   }
 
   /**
-   * Stores the events from the {@linkplain TimelineEvent} object
+   * Stores the events from the {@linkplain TimelineEvent} object.
    */
   private void storeEvents(byte[] rowKey, Set<TimelineEvent> events,
       boolean isApplication) throws IOException {
@@ -428,10 +429,10 @@ public class HBaseTimelineWriterImpl extends AbstractService implements
                         Bytes.toBytes(info.getKey()));
                 if (isApplication) {
                   ApplicationColumnPrefix.EVENT.store(rowKey, applicationTable,
-                    compoundColumnQualifierBytes, null, info.getValue());
+                      compoundColumnQualifierBytes, null, info.getValue());
                 } else {
                   EntityColumnPrefix.EVENT.store(rowKey, entityTable,
-                    compoundColumnQualifierBytes, null, info.getValue());
+                      compoundColumnQualifierBytes, null, info.getValue());
                 }
               } // for info: eventInfo
             }
@@ -459,7 +460,7 @@ public class HBaseTimelineWriterImpl extends AbstractService implements
 
   /**
    * close the hbase connections The close APIs perform flushing and release any
-   * resources held
+   * resources held.
    */
   @Override
   protected void serviceStop() throws Exception {

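The reindented lines above sit in code that writes entity configs, metrics and events through buffered mutators keyed by column prefix. Stripped of the ColumnPrefix and TypedBufferedMutator plumbing, a bare HBase 1.x write of a single config value looks like this sketch (table name, column family, row key and config key are all invented):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.BufferedMutator;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.ConnectionFactory;
  import org.apache.hadoop.hbase.client.Put;
  import org.apache.hadoop.hbase.util.Bytes;

  public class EntityConfigWriteSketch {
    public static void main(String[] args) throws Exception {
      Configuration conf = HBaseConfiguration.create();
      // Table name, column family, row key and config key are all invented;
      // the real writer derives them from the entity table schema classes.
      try (Connection conn = ConnectionFactory.createConnection(conf);
           BufferedMutator mutator = conn.getBufferedMutator(
               TableName.valueOf("timelineservice.entity"))) {
        Put put = new Put(
            Bytes.toBytes("cluster!user!flow!1!app_1!TEST!entity_1"));
        put.addColumn(Bytes.toBytes("c"),
            Bytes.toBytes("mapreduce.map.memory.mb"), Bytes.toBytes("1024"));
        mutator.mutate(put);   // buffered; flushed when the mutator is closed
      }
    }
  }
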
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/OfflineAggregationWriter.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/OfflineAggregationWriter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/OfflineAggregationWriter.java
index e1219e0..1484f22 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/OfflineAggregationWriter.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/OfflineAggregationWriter.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.common.OfflineAggre
 import java.io.IOException;
 
 /**
- * YARN timeline service v2 offline aggregation storage interface
+ * YARN timeline service v2 offline aggregation storage interface.
  */
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
@@ -46,8 +46,8 @@ public abstract class OfflineAggregationWriter extends AbstractService {
 
   /**
    * Persist aggregated timeline entities to the offline store based on which
-   * track this entity is to be rolled up to. The tracks along which aggregations
-   * are to be done are given by {@link OfflineAggregationInfo}.
+   * track this entity is to be rolled up to. The tracks along which
+   * aggregations are to be done are given by {@link OfflineAggregationInfo}.
    *
    * @param context a {@link TimelineCollectorContext} object that describes the
    *                context information of the aggregated data. Depends on the
@@ -58,9 +58,10 @@ public abstract class OfflineAggregationWriter extends AbstractService {
    *             detail of the aggregation. Current supported option is
    *             {@link OfflineAggregationInfo#FLOW_AGGREGATION}.
    * @return a {@link TimelineWriteResponse} object.
-   * @throws IOException
+   * @throws IOException if any problem occurs while writing aggregated
+   *     entities.
    */
   abstract TimelineWriteResponse writeAggregatedEntity(
-      TimelineCollectorContext context,
-      TimelineEntities entities, OfflineAggregationInfo info) throws IOException;
+      TimelineCollectorContext context, TimelineEntities entities,
+      OfflineAggregationInfo info) throws IOException;
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/PhoenixOfflineAggregationWriterImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/PhoenixOfflineAggregationWriterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/PhoenixOfflineAggregationWriterImpl.java
index b5834c0..130cb6c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/PhoenixOfflineAggregationWriterImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/PhoenixOfflineAggregationWriterImpl.java
@@ -102,18 +102,18 @@ public class PhoenixOfflineAggregationWriterImpl
   private static final String PHOENIX_COL_FAMILY_PLACE_HOLDER
       = "timeline_cf_placeholder";
 
-  /** Default Phoenix JDBC driver name */
+  /** Default Phoenix JDBC driver name. */
   private static final String DRIVER_CLASS_NAME
       = "org.apache.phoenix.jdbc.PhoenixDriver";
 
-  /** Default Phoenix timeline config column family */
+  /** Default Phoenix timeline config column family. */
   private static final String METRIC_COLUMN_FAMILY = "m.";
-  /** Default Phoenix timeline info column family */
+  /** Default Phoenix timeline info column family. */
   private static final String INFO_COLUMN_FAMILY = "i.";
-  /** Default separator for Phoenix storage */
+  /** Default separator for Phoenix storage. */
   private static final String AGGREGATION_STORAGE_SEPARATOR = ";";
 
-  /** Connection string to the deployed Phoenix cluster */
+  /** Connection string to the deployed Phoenix cluster. */
   private String connString = null;
   private Properties connProperties = new Properties();
 
@@ -162,7 +162,8 @@ public class PhoenixOfflineAggregationWriterImpl
         }
         int idx = info.setStringsForPrimaryKey(ps, context, null, 1);
         ps.setLong(idx++, entity.getCreatedTime());
-        ps.setString(idx++, StringUtils.join(formattedMetrics.keySet().toArray(),
+        ps.setString(idx++,
+            StringUtils.join(formattedMetrics.keySet().toArray(),
             AGGREGATION_STORAGE_SEPARATOR));
         ps.execute();
 
@@ -185,7 +186,7 @@ public class PhoenixOfflineAggregationWriterImpl
    * Create Phoenix tables for offline aggregation storage if the tables do not
    * exist.
    *
-   * @throws IOException
+   * @throws IOException if any problem happens while creating Phoenix tables.
    */
   public void createPhoenixTables() throws IOException {
     // Create tables if necessary
@@ -197,7 +198,8 @@ public class PhoenixOfflineAggregationWriterImpl
           + "(user VARCHAR NOT NULL, cluster VARCHAR NOT NULL, "
           + "flow_name VARCHAR NOT NULL, "
           + "created_time UNSIGNED_LONG, "
-          + METRIC_COLUMN_FAMILY + PHOENIX_COL_FAMILY_PLACE_HOLDER + " VARBINARY, "
+          + METRIC_COLUMN_FAMILY + PHOENIX_COL_FAMILY_PLACE_HOLDER
+          + " VARBINARY, "
           + "metric_names VARCHAR, info_keys VARCHAR "
           + "CONSTRAINT pk PRIMARY KEY("
           + "user, cluster, flow_name))";
@@ -206,7 +208,8 @@ public class PhoenixOfflineAggregationWriterImpl
           + OfflineAggregationInfo.USER_AGGREGATION_TABLE_NAME
           + "(user VARCHAR NOT NULL, cluster VARCHAR NOT NULL, "
           + "created_time UNSIGNED_LONG, "
-          + METRIC_COLUMN_FAMILY + PHOENIX_COL_FAMILY_PLACE_HOLDER + " VARBINARY, "
+          + METRIC_COLUMN_FAMILY + PHOENIX_COL_FAMILY_PLACE_HOLDER
+          + " VARBINARY, "
           + "metric_names VARCHAR, info_keys VARCHAR "
           + "CONSTRAINT pk PRIMARY KEY(user, cluster))";
       stmt.executeUpdate(sql);
@@ -251,9 +254,9 @@ public class PhoenixOfflineAggregationWriterImpl
   private static class DynamicColumns<K> {
     static final String COLUMN_FAMILY_TYPE_BYTES = " VARBINARY";
     static final String COLUMN_FAMILY_TYPE_STRING = " VARCHAR";
-    String columnFamilyPrefix;
-    String type;
-    Set<K> columns;
+    private String columnFamilyPrefix;
+    private String type;
+    private Set<K> columns;
 
     public DynamicColumns(String columnFamilyPrefix, String type,
         Set<K> keyValues) {

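createPhoenixTables above issues CREATE TABLE IF NOT EXISTS statements over a plain JDBC connection to Phoenix. A reduced sketch of that flow (the ZooKeeper quorum in the JDBC URL and the table layout are assumptions; the real DDL is assembled from OfflineAggregationInfo constants):

  import java.sql.Connection;
  import java.sql.DriverManager;
  import java.sql.Statement;

  public class PhoenixTableCreationSketch {
    public static void main(String[] args) throws Exception {
      // Hypothetical quorum and table; requires the Phoenix JDBC driver on
      // the classpath.
      String url = "jdbc:phoenix:localhost:2181";
      String sql = "CREATE TABLE IF NOT EXISTS flow_aggregation_sketch "
          + "(user VARCHAR NOT NULL, cluster VARCHAR NOT NULL, "
          + "flow_name VARCHAR NOT NULL, created_time UNSIGNED_LONG "
          + "CONSTRAINT pk PRIMARY KEY(user, cluster, flow_name))";
      try (Connection conn = DriverManager.getConnection(url);
           Statement stmt = conn.createStatement()) {
        stmt.executeUpdate(sql);
      }
    }
  }
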
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineAggregationTrack.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineAggregationTrack.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineAggregationTrack.java
index 955ca80..f0b1e47 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineAggregationTrack.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineAggregationTrack.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.yarn.server.timelineservice.storage;
 
 /**
  * specifies the tracks along which an entity
- * info is to be aggregated on
+ * info is to be aggregated.
  *
  */
 public enum TimelineAggregationTrack {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineReader.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineReader.java
index ccb33b7..e8eabf1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineReader.java
@@ -97,7 +97,8 @@ public interface TimelineReader extends Service {
    *    <cite>FlowRunEntity</cite>.<br>
    *    For all other entity types, entity returned is of type
    *    <cite>TimelineEntity</cite>.
-   * @throws IOException
+   * @throws IOException if there is an exception encountered while fetching
+   *    entity from backend storage.
    */
   TimelineEntity getEntity(TimelineReaderContext context,
       TimelineDataToRetrieve dataToRetrieve) throws IOException;
@@ -169,7 +170,8 @@ public interface TimelineReader extends Service {
    *    <cite>FlowRunEntity</cite>.<br>
    *    For all other entity types, entities returned are of type
    *    <cite>TimelineEntity</cite>.
-   * @throws IOException
+   * @throws IOException if there is an exception encountered while fetching
+   *    entity from backend storage.
    */
   Set<TimelineEntity> getEntities(
       TimelineReaderContext context,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db76a3ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
index 46bc2e6..33f5449 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
@@ -53,7 +53,9 @@ import com.google.common.annotations.VisibleForTesting;
  */
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
-public class TimelineSchemaCreator {
+public final class TimelineSchemaCreator {
+  private TimelineSchemaCreator() {
+  }
 
   final static String NAME = TimelineSchemaCreator.class.getSimpleName();
   private static final Log LOG = LogFactory.getLog(TimelineSchemaCreator.class);