Posted to dev@chukwa.apache.org by ey...@apache.org on 2016/04/03 01:14:41 UTC

[3/3] chukwa git commit: CHUKWA-802. Updated Javadoc for Java 8 support. (Eric Yang)

CHUKWA-802. Updated Javadoc for Java 8 support.  (Eric Yang)


Project: http://git-wip-us.apache.org/repos/asf/chukwa/repo
Commit: http://git-wip-us.apache.org/repos/asf/chukwa/commit/8011ff1f
Tree: http://git-wip-us.apache.org/repos/asf/chukwa/tree/8011ff1f
Diff: http://git-wip-us.apache.org/repos/asf/chukwa/diff/8011ff1f

Branch: refs/heads/master
Commit: 8011ff1f59b4702d1987281fc90b2ca60f2e3a9b
Parents: 8329221
Author: Eric Yang <ey...@apache.org>
Authored: Sat Apr 2 16:14:21 2016 -0700
Committer: Eric Yang <ey...@apache.org>
Committed: Sat Apr 2 16:14:21 2016 -0700

----------------------------------------------------------------------
 CHANGES.txt                                     |   2 +
 pom.xml                                         |  46 +-
 .../java/org/apache/hadoop/chukwa/Chunk.java    |   2 +
 .../org/apache/hadoop/chukwa/ChunkBuilder.java  |   2 +-
 .../analysis/salsa/visualization/Heatmap.java   |  11 +-
 .../analysis/salsa/visualization/Swimlanes.java |  10 +-
 .../chukwa/database/MetricsAggregation.java     |   4 +-
 .../chukwa/datacollection/ChunkQueue.java       |   6 +-
 .../chukwa/datacollection/ChunkReceiver.java    |   2 +-
 .../chukwa/datacollection/DataFactory.java      |   2 +
 .../datacollection/OffsetStatsManager.java      |   8 +-
 .../chukwa/datacollection/adaptor/Adaptor.java  |  47 +-
 .../datacollection/adaptor/SyslogAdaptor.java   |   8 +-
 .../adaptor/filetailer/FileTailingAdaptor.java  |   2 -
 .../datacollection/adaptor/jms/JMSAdaptor.java  |  22 +-
 .../jms/JMSMessagePropertyTransformer.java      |   8 +-
 .../adaptor/jms/JMSMessageTransformer.java      |   4 +-
 .../agent/AdaptorResetThread.java               |   2 +-
 .../agent/AgentControlSocketListener.java       |   3 +-
 .../datacollection/agent/ChukwaAgent.java       |  11 +-
 .../agent/metrics/AgentMetrics.java             |   5 +-
 .../agent/metrics/ChunkQueueMetrics.java        |   5 +-
 .../agent/rest/AdaptorController.java           |   5 +
 .../controller/ChukwaAgentController.java       |  27 +-
 .../datacollection/sender/ChukwaHttpSender.java |  12 +-
 .../datacollection/sender/ChukwaSender.java     |   1 +
 .../sender/RetryListOfCollectors.java           |   4 +-
 .../sender/metrics/HttpSenderMetrics.java       |   5 +-
 .../test/FileTailerStressTest.java              |   2 +-
 .../datacollection/writer/ChukwaWriter.java     |  16 +-
 .../datacollection/writer/InMemoryWriter.java   |   2 +-
 .../datacollection/writer/SocketTeeWriter.java  |   8 +-
 .../datacollection/writer/gora/ChukwaChunk.java | 865 ++++++++++++-------
 .../datacollection/writer/gora/GoraWriter.java  |   4 +-
 .../chukwa/dataloader/DataLoaderFactory.java    |   6 +-
 .../chukwa/dataloader/MetricDataLoader.java     |   5 +-
 .../chukwa/datastore/ChukwaHBaseStore.java      | 110 +--
 .../chukwa/datatrigger/HttpTriggerAction.java   |  10 +-
 .../chukwa/extraction/archive/SinkArchiver.java |   2 +-
 .../demux/DailyChukwaRecordRolling.java         |   4 +-
 .../chukwa/extraction/demux/DemuxManager.java   |   2 +-
 .../demux/HourlyChukwaRecordRolling.java        |   4 +-
 .../extraction/demux/MoveOrMergeRecordFile.java |   4 +-
 .../extraction/demux/MoveToRepository.java      |   4 +-
 .../processor/mapper/ProcessorFactory.java      |   2 +
 .../reducer/ReduceProcessorFactory.java         |   2 +
 .../chukwa/extraction/engine/RecordUtil.java    |   4 +-
 .../extraction/hbase/AbstractProcessor.java     |  24 +-
 .../hbase/ChukwaMetricsProcessor.java           |   8 +-
 .../chukwa/hicc/rest/ChartController.java       |  15 +-
 .../chukwa/hicc/rest/CirclesController.java     |   4 +-
 .../chukwa/hicc/rest/SessionController.java     |   3 +-
 .../log4j/ChukwaDailyRollingFileAppender.java   |  37 +-
 .../chukwa/rest/resource/ClientTrace.java       |   6 +-
 .../tools/backfilling/BackfillingLoader.java    |   4 +-
 .../apache/hadoop/chukwa/util/DumpArchive.java  |   6 +-
 .../apache/hadoop/chukwa/util/DumpChunks.java   |   7 +-
 .../apache/hadoop/chukwa/util/DumpRecord.java   |   6 +-
 .../hadoop/chukwa/util/HierarchyDataType.java   |  17 +-
 .../apache/hadoop/chukwa/util/RegexUtil.java    |  59 +-
 .../apache/hadoop/chukwa/util/XssFilter.java    |   4 +-
 .../metrics/spi/AbstractMetricsContext.java     |   7 +-
 src/main/web/hicc/home/css/jquery.gridster.css  |   9 +
 src/site/apt/user.apt                           | 325 -------
 .../chukwa/extraction/archive/TestArchive.java  |   1 +
 65 files changed, 947 insertions(+), 917 deletions(-)
----------------------------------------------------------------------
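
Java 8 ships javadoc with doclint enabled by default, so the tool now rejects malformed HTML in doc comments and flags undocumented parameters, return values and thrown exceptions; that is presumably what drives the bulk of the edits below, which fill in missing @param/@return/@throws tags and escape literal angle brackets. A minimal sketch of the shape doclint accepts, using a hypothetical interface rather than a Chukwa class:

/**
 * Hypothetical sink interface (not part of the Chukwa tree), shown only to
 * illustrate a doclint-clean doc comment: every parameter, the return value
 * and the checked exception are documented, and no raw angle brackets appear.
 */
public interface ChunkSink {

  /**
   * Adds a chunk of data to this sink, blocking if the sink is full.
   *
   * @param data raw bytes to enqueue
   * @param offset stream offset of the first byte in {@code data}
   * @return number of bytes accepted
   * @throws java.io.IOException if the sink cannot store the data
   */
  int add(byte[] data, long offset) throws java.io.IOException;
}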


http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/CHANGES.txt
----------------------------------------------------------------------
diff --git a/CHANGES.txt b/CHANGES.txt
index 203e95c..e44c58d 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -6,6 +6,8 @@ Trunk (unreleased changes)
 
   IMPROVEMENTS
 
+    CHUKWA-802. Updated Javadoc for Java 8 support.  (Eric Yang)
+
   BUGS
 
     CHUKWA-801. Increase measurement timeout for TestFailTailer. (Eric Yang)

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 7b16469..029a3d3 100644
--- a/pom.xml
+++ b/pom.xml
@@ -514,9 +514,9 @@
                       <goals>
                         <goal>testCompile</goal>
                       </goals>
-                      <compilerVersion>1.6</compilerVersion>
-                      <source>1.6</source>
-                      <target>1.6</target>
+                      <compilerVersion>1.8</compilerVersion>
+                      <source>1.8</source>
+                      <target>1.8</target>
                       <testExcludes>
                         <exclude>**/ChukwaJobTrackerInstrumentation.java</exclude>
                         <exclude>**/TestDemuxManager.java</exclude>
@@ -1498,7 +1498,7 @@
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-project-info-reports-plugin</artifactId>
-          <version>2.4</version>
+          <version>2.9</version>
           <configuration>
             <dependencyLocationsEnabled>false</dependencyLocationsEnabled>
           </configuration>
@@ -1520,6 +1520,44 @@
               <configuration>
                 <aggregate>true</aggregate>
                 <doctitle>${project.name} API ${project.version}</doctitle>
+                <show>public</show>
+                <tags>
+                  <tag>
+                    <name>response.representation.200.doc</name>
+                    <placement>a</placement>
+                    <head>response.representation.200.doc</head>
+                  </tag>
+                  <tag>
+                    <name>response.representation.200.mediaType</name>
+                    <placement>a</placement>
+                    <head>response.representation.200.mediaType</head>
+                  </tag>
+                  <tag>
+                    <name>response.representation.200.example</name>
+                    <placement>a</placement>
+                    <head>response.representation.200.example</head>
+                  </tag>
+                  <tag>
+                    <name>response.representation.400.doc</name>
+                    <placement>a</placement>
+                    <head>response.representation.400.doc</head>
+                  </tag>
+                  <tag>
+                    <name>response.representation.400.mediaType</name>
+                    <placement>a</placement>
+                    <head>response.representation.400.mediaType</head>
+                  </tag>
+                  <tag>
+                    <name>response.representation.400.example</name>
+                    <placement>a</placement>
+                    <head>response.representation.400.example</head>
+                  </tag>
+                  <tag>
+                    <name>request.representation.example</name>
+                    <placement>a</placement>
+                    <head>request.representation.example</head>
+                  </tag>
+                </tags>
               </configuration>
               <reports>
                 <report>javadoc</report>
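
The new tags block registers the custom block tags that the REST controllers use in their doc comments (see the AdaptorController hunk further down), so the Java 8 javadoc run does not flag them as unknown tags; placement "a" allows each tag in any position and the head element supplies the heading shown in the generated pages. A rough sketch of how one of these tags is consumed, using a hypothetical resource class:

public class AdaptorStatusDocExample {

  /**
   * Hypothetical resource method (the real usage lives in AdaptorController
   * below); the custom tags registered in pom.xml document the HTTP responses
   * in the generated Javadoc.
   *
   * @return adaptor status rendered as JSON
   * @response.representation.200.doc Adaptor status returned successfully
   * @response.representation.200.mediaType application/json
   * @response.representation.400.doc The requested adaptor id is unknown
   */
  public String getAdaptorStatus() {
    return "{\"status\":\"running\"}";
  }
}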

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/Chunk.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/Chunk.java b/src/main/java/org/apache/hadoop/chukwa/Chunk.java
index f1a208e..298aaed 100644
--- a/src/main/java/org/apache/hadoop/chukwa/Chunk.java
+++ b/src/main/java/org/apache/hadoop/chukwa/Chunk.java
@@ -91,6 +91,7 @@ public interface Chunk {
   /**
    * Retrieve a reference to the adaptor that sent this event. Used by
    * LocalAgent and Connectors to deliver acks to the appropriate place.
+   * @return Adaptor
    */
   public Adaptor getInitiator();
 
@@ -110,6 +111,7 @@ public interface Chunk {
 
   /**
    * Add tag.
+   * @param tag is a comma separated list
    * 
    */
   public void addTag(String tag);

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/ChunkBuilder.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/ChunkBuilder.java b/src/main/java/org/apache/hadoop/chukwa/ChunkBuilder.java
index ccf2de6..f7de5b4 100644
--- a/src/main/java/org/apache/hadoop/chukwa/ChunkBuilder.java
+++ b/src/main/java/org/apache/hadoop/chukwa/ChunkBuilder.java
@@ -36,7 +36,7 @@ public class ChunkBuilder {
   /**
    * Adds the data in rec to an internal buffer; rec can be reused immediately.
    * 
-   * @param rec
+   * @param rec is byte array of data
    */
   public void addRecord(byte[] rec) {
     lastRecOffset = lastRecOffset + rec.length;

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java b/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java
index 37f341c..9cbe319 100644
--- a/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java
+++ b/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java
@@ -130,10 +130,11 @@ public class Heatmap {
   }
   
   /**
-   * @brief Constructor for Swimlanes visualization object
+   * Constructor for Swimlanes visualization object
    * @param timezone Timezone string from environment
    * @param cluster Cluster name from environment
    * @param event_type Whether to display shuffles or not
+   * @param query_stat_type Query state type
    * @param valmap HashMap of key/value pairs simulating parameters from a HttpRequest
    */
   public Heatmap
@@ -227,6 +228,8 @@ public class Heatmap {
   /**
    * Set dimensions of image to be generated
    * Call before calling @see #run
+   * @param width Image width in pixels
+   * @param height Image height in pixels
    */
   public void setDimensions(int width, int height) {
     this.SIZE_X=width;
@@ -236,6 +239,7 @@ public class Heatmap {
   /**
    * Specify whether to print labels of hosts along axes
    * Call before calling @see #run
+   * @param legendopt Flag to control plot legends
    */
   public void setLegend(boolean legendopt) {
     if (legendopt) {
@@ -249,6 +253,10 @@ public class Heatmap {
   /**
    * Generates image in specified format, and writes image as binary
    * output to supplied output stream 
+   * @param output Image output stream
+   * @param img_fmt Image format
+   * @param scale Image scale
+   * @return true if image is saved
    */
   public boolean getImage(java.io.OutputStream output, String img_fmt, double scale) {
     dis = new Display(this.viz);
@@ -526,6 +534,7 @@ public class Heatmap {
   /**
    * Interfaces with database to get data and 
    * populate data structures for rendering
+   * @return heat map data JSON
    */
   @edu.umd.cs.findbugs.annotations.SuppressWarnings(value =
       "SQL_NONCONSTANT_STRING_PASSED_TO_EXECUTE", 

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java b/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java
index 07d9576..c2e0e32 100644
--- a/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java
+++ b/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java
@@ -443,7 +443,7 @@ public class Swimlanes {
   }
   
   /**
-   * @brief Constructor for Swimlanes visualization object
+   * Constructor for Swimlanes visualization object
    * @param timezone Timezone string from environment
    * @param cluster Cluster name from environment
    * @param event_type Whether to display shuffles or not
@@ -533,6 +533,7 @@ public class Swimlanes {
   /**
    * Set job ID to filter results on
    * Call before calling @see #run
+   * @param s job name
    */
   public void setJobName(String s) {
     this.jobname = s;
@@ -541,6 +542,8 @@ public class Swimlanes {
   /**
    * Set dimensions of image to be generated
    * Call before calling @see #run
+   * @param width image width in pixels
+   * @param height image height in pixels
    */  
   public void setDimensions(int width, int height) {
     this.SIZE_X=width;
@@ -552,6 +555,7 @@ public class Swimlanes {
    * Advisable to not print legend for excessively small images since
    * legend has fixed point size
    * Call before calling @see #run
+   * @param legendopt parameter to turn on legends
    */
   public void setLegend(boolean legendopt) {
     if (legendopt) {
@@ -564,6 +568,10 @@ public class Swimlanes {
   /**
    * Generates image in specified format, and writes image as binary
    * output to supplied output stream 
+   * @param output output stream of image
+   * @param img_fmt image format
+   * @param scale image scaling factor
+   * @return true if image is saved
    */
   public boolean getImage(java.io.OutputStream output, String img_fmt, double scale) {
     dis = new Display(this.viz);

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/database/MetricsAggregation.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/database/MetricsAggregation.java b/src/main/java/org/apache/hadoop/chukwa/database/MetricsAggregation.java
index 9d8e8ba..bf39757 100644
--- a/src/main/java/org/apache/hadoop/chukwa/database/MetricsAggregation.java
+++ b/src/main/java/org/apache/hadoop/chukwa/database/MetricsAggregation.java
@@ -38,8 +38,8 @@ public class MetricsAggregation {
   private static DatabaseConfig mdlConfig;
 
   /**
-   * @param args
-   * @throws SQLException
+   * @param args is list of command line parameters
+   * @throws SQLException if SQL query fails
    */
   @edu.umd.cs.findbugs.annotations.SuppressWarnings(value =
       "SQL_NONCONSTANT_STRING_PASSED_TO_EXECUTE", 

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/ChunkQueue.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/ChunkQueue.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/ChunkQueue.java
index 5ed8e4a..677e18a 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/ChunkQueue.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/ChunkQueue.java
@@ -31,7 +31,7 @@ public interface ChunkQueue extends ChunkReceiver {
   /**
    * Add a chunk to the queue, blocking if queue is full.
    * 
-   * @param chunk
+   * @param chunk A binary blob
    * @throws InterruptedException if thread is interrupted while blocking
    */
   public void add(Chunk chunk) throws InterruptedException;
@@ -39,6 +39,9 @@ public interface ChunkQueue extends ChunkReceiver {
   /**
    * Return at least one, and no more than count, Chunks into chunks. Blocks if
    * queue is empty.
+   * @param chunks List of binary blobs
+   * @param count maximum number of chunks to return
+   * @throws InterruptedException if thread is interrupted while collecting
    */
   public void collect(List<Chunk> chunks, int count)
       throws InterruptedException;
@@ -46,6 +49,7 @@ public interface ChunkQueue extends ChunkReceiver {
   /**
    * Return an approximation of the number of chunks in the queue currently. No
    * guarantees are made about the accuracy of this number.
+   * @return number of chunks in the queue currently
    */
   public int size();
 }

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/ChunkReceiver.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/ChunkReceiver.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/ChunkReceiver.java
index ba71387..d105298 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/ChunkReceiver.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/ChunkReceiver.java
@@ -25,7 +25,7 @@ public interface ChunkReceiver {
   /**
    * Add a chunk to the queue, potentially blocking.
    * 
-   * @param event
+   * @param event is a Chukwa Chunk
    * @throws InterruptedException if thread is interrupted while blocking
    */
   public void add(Chunk event) throws java.lang.InterruptedException;

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/DataFactory.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/DataFactory.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/DataFactory.java
index f27b806..405f030 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/DataFactory.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/DataFactory.java
@@ -107,6 +107,8 @@ public class DataFactory {
   }
   
   /**
+   * @param conf is Chukwa configuration
+   * @param filename is collector list
    * @return empty list if file does not exist
    * @throws IOException on other error
    */

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/OffsetStatsManager.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/OffsetStatsManager.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/OffsetStatsManager.java
index 497f738..725b9b9 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/OffsetStatsManager.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/OffsetStatsManager.java
@@ -23,20 +23,20 @@ import java.util.Map;
 import java.util.LinkedList;
 import java.util.Date;
 import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.TimeUnit;
 
 /**
  * Manages stats for multiple objects of type T. T can be any class that is used
  * as a key for offset statistics (i.e. Agent, Collector, etc.). A client would
- * create an instance of this class and call <code>addOffsetDataPoint<code>
+ * create an instance of this class and call <code>addOffsetDataPoint</code>
  * repeatedly over time. Then <code>calcAverageRate</code> can be called to
  * retrieve the average offset-unit per second over a given time interval.
  * <P>
  * For a given object T that is actively adding data points, stats are kept for
- * up to 20 minutes.
+ * up to 20 minutes.</p>
  * <P>
  * Care should be taken to always call <code>remove()</code> when old T objects
- * should no longer be tracked.
+ * should no longer be tracked.</p>
+ * @param <T> is object type
  */
 public class OffsetStatsManager<T> {
   protected Logger log = Logger.getLogger(getClass());
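
The hunk above also repairs an unterminated <code> tag, closes the <P> paragraphs, and documents the class-level type parameter; doclint reports unbalanced inline markup and undocumented type parameters along with everything else. A minimal sketch of a generic class documented this way (hypothetical, not a Chukwa class):

/**
 * Hypothetical tracker (not part of the Chukwa tree) showing the
 * {@code @param <T>} tag used to document a class-level type parameter.
 *
 * @param <T> type of the keys whose offsets are being tracked
 */
public class LatestOffsetTracker<T> {

  private final java.util.concurrent.ConcurrentHashMap<T, Long> latest =
      new java.util.concurrent.ConcurrentHashMap<T, Long>();

  /**
   * Records the most recent offset observed for a key.
   *
   * @param key object being tracked, e.g. an agent or collector
   * @param offset last offset seen for {@code key}
   */
  public void record(T key, long offset) {
    latest.put(key, offset);
  }
}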

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/Adaptor.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/Adaptor.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/Adaptor.java
index 2d92960..f224368 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/Adaptor.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/Adaptor.java
@@ -34,7 +34,7 @@ import org.apache.hadoop.chukwa.datacollection.agent.AdaptorManager;
  * offset in the stream.
  * 
  * If an adaptor crashes at byte offset n, and is restarted at byte offset k,
- * with k < n, it is allowed to send different values for bytes k through n the
+ * with k &lt; n, it is allowed to send different values for bytes k through n the
  * second time around. However, the stream must still be parseable, assuming
  * that bytes 0-k come from the first run,and bytes k - n come from the second.
  * 
@@ -48,11 +48,12 @@ import org.apache.hadoop.chukwa.datacollection.agent.AdaptorManager;
 public interface Adaptor {
   /**
    * Start this adaptor
+   * @param adaptorID Adaptor ID
    * 
    * @param type the application type, who is starting this adaptor
-   * @param status the status string to use for configuration.
    * @param offset the stream offset of the first byte sent by this adaptor
-   * @throws AdaptorException
+   * @param dest Chunk receiving destination
+   * @throws AdaptorException if adaptor can not be started
    */
   public void start(String adaptorID, String type, long offset,
       ChunkReceiver dest) throws AdaptorException;
@@ -74,50 +75,22 @@ public interface Adaptor {
    * Return the stream name, given params.
    * The stream name is the part of the Adaptor status that's used to 
    * determine uniqueness. 
+   * @param datatype Data type
+   * @param params Adaptor parameters
+   * @param c Adaptor Manager
    * 
    * @return Stream name as a string, null if params are malformed
    */
   public String parseArgs(String datatype, String params, AdaptorManager c);
   
- 
-  
   /**
    * Signals this adaptor to come to an orderly stop. The adaptor ought to push
    * out all the data it can before exiting depending of the shutdown policy
+   * @param shutdownPolicy is defined as forcefully or gracefully
    * 
    * @return the logical offset at which the adaptor was when the method return
-   * @throws AdaptorException
+   * @throws AdaptorException Exception on shutdown
    */
   public long shutdown(AdaptorShutdownPolicy shutdownPolicy) throws AdaptorException;
-  
-  /**
-   * Signals this adaptor to come to an orderly stop. The adaptor ought to push
-   * out all the data it can before exiting.
-   * 
-   * This method is synchronous up to 60 seconds
-   * 
-   * @return the logical offset at which the adaptor stops
-   * @throws AdaptorException
-   */
-//  @Deprecated
-//  public long shutdown() throws AdaptorException;
-
-  
-  /**
-   * Signals this adaptor to come to an abrupt stop, as quickly as it can. The
-   * use case here is "Whups, I didn't mean to start that adaptor tailing a
-   * gigabyte file, stop it now".
-   * 
-   * Adaptors might need to do something nontrivial here, e.g., in the case in
-   * which they have registered periodic timer interrupts, or use a shared
-   * worker thread from which they need to disengage.
-   * 
-   * This method is synchronous: In other words, after shutdown() returns, no
-   * new data should be written.
-   * 
-   * @throws AdaptorException
-   */
-//  @Deprecated
-//  public void hardStop() throws AdaptorException;
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/SyslogAdaptor.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/SyslogAdaptor.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/SyslogAdaptor.java
index 50dec64..6ca4d14 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/SyslogAdaptor.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/SyslogAdaptor.java
@@ -38,10 +38,10 @@ import org.apache.log4j.Logger;
  * 
  * Data Type mapping can be overwritten in Chukwa Agent Configuration file, i.e.:
  * 
- * <property>
- *   <name>syslog.adaptor.port.9095.facility.LOCAL1</name>
- *   <value>HADOOP</value>
- * </property>
+ * &lt;property&gt;
+ *   &lt;name&gt;syslog.adaptor.port.9095.facility.LOCAL1&lt;/name&gt;
+ *   &lt;value&gt;HADOOP&lt;/value&gt;
+ * &lt;/property&gt;
  * 
  * When demux takes place, data received on port 9095 with facility name LOCAL0 will
  * be processed by demux parser for data type "HADOOP".
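
The recurring entity substitutions in this and the surrounding files are the other half of the doclint story: a doc comment is parsed as HTML, so literal angle brackets in configuration snippets or usage strings must be written as &lt; and &gt;. Wrapping such snippets in a pre-formatted {@code ...} block is an alternative that keeps them readable without hand escaping; a sketch of that approach, using a hypothetical class:

// Hypothetical class, not in the Chukwa tree; shows the <pre>{@code ...}</pre>
// idiom as an alternative to writing the character entities by hand.
public class SyslogFacilityMappingDoc {

  /**
   * Maps a syslog facility received on port 9095 to the Chukwa data type
   * configured by:
   *
   * <pre>{@code
   * <property>
   *   <name>syslog.adaptor.port.9095.facility.LOCAL1</name>
   *   <value>HADOOP</value>
   * </property>
   * }</pre>
   *
   * @param facility syslog facility name, e.g. LOCAL1
   * @return the configured Chukwa data type, or null if none is configured
   */
  public String dataTypeFor(String facility) {
    return "LOCAL1".equals(facility) ? "HADOOP" : null;
  }
}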

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/FileTailingAdaptor.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/FileTailingAdaptor.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/FileTailingAdaptor.java
index 9fc25b9..ff1e60b 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/FileTailingAdaptor.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/FileTailingAdaptor.java
@@ -113,8 +113,6 @@ public class FileTailingAdaptor extends LWFTAdaptor {
    * Looks at the tail of the associated file, adds some of it to event queue
    * This method is not thread safe. Returns true if there's more data in the
    * file
-   * 
-   * @param eq the queue to write Chunks into
    */
   @Override
   public boolean tailFile()

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/JMSAdaptor.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/JMSAdaptor.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/JMSAdaptor.java
index 79f8db6..ff07917 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/JMSAdaptor.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/JMSAdaptor.java
@@ -49,8 +49,8 @@ import javax.jms.MessageConsumer;
  * <P>
  * This adaptor is added to an Agent like so:
  * <code>
- * add JMSAdaptor <dataType> <brokerURL> <-t <topicName>|-q <queueName>> [-s <JMSSelector>]
- *  [-x <transformerName>] [-p <transformerConfigs>] <offset>
+ * add JMSAdaptor &lt;dataType&gt; &lt;brokerURL&gt; &lt;-t &lt;topicName&gt; |-q &lt;queueName&gt; [-s &lt;JMSSelector&gt;]
+ *  [-x &lt;transformerName&gt;] [-p &lt;transformerConfigs&gt;] &lt;offset&gt;
  * </code>
  * <ul>
  * <li><code>dataType</code> - The chukwa data type.</li>
@@ -119,11 +119,11 @@ public class JMSAdaptor extends AbstractAdaptor {
 
   /**
    * This adaptor received configuration like this:
-   * <brokerURL> <-t <topicName>|-q <queueName>> [-s <JMSSelector>] [-x <transformerName>]
-   * [-p <transformerProperties>]
+   * &lt;brokerURL&gt; &lt;-t &lt;topicName&gt;|-q &lt;queueName&gt;&gt; [-s &lt;JMSSelector&gt;] [-x &lt;transformerName&gt;]
+   * [-p &lt;transformerProperties&gt;]
    *
-   * @param s
-   * @return
+   * @param s is a list of parameters
+   * @return Adaptor ID
    */
   @Override
   public String parseArgs(String s) {
@@ -273,17 +273,17 @@ public class JMSAdaptor extends AbstractAdaptor {
 
   /**
    * Status is used to write checkpoints. Checkpoints are written as:
-   * ADD <adaptorKey> = <adaptorClass> <currentStatus> <offset>
+   * ADD &lt;adaptorKey&gt; = &lt;adaptorClass&gt; &lt;currentStatus&gt; &lt;offset&gt;
    *
    * Once they're reloaded, adaptors are re-initialized with
-   * <adaptorClass> <currentStatus> <offset>
+   * &lt;adaptorClass&gt; &lt;currentStatus&gt; &lt;offset&gt;
    *
    * While doing so, this gets passed by to the parseArgs method:
-   * <currentStatus>
+   * &lt;currentStatus&gt;
    *
-   * Without the first token in <currentStatus>, which is expected to be <dataType>.
+   * Without the first token in &lt;currentStatus&gt;, which is expected to be &lt;dataType&gt;.
    *
-   * @return
+   * @return Adaptor status
    */
   @Override
   public String getCurrentStatus() {

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/JMSMessagePropertyTransformer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/JMSMessagePropertyTransformer.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/JMSMessagePropertyTransformer.java
index facff2d..b8689bb 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/JMSMessagePropertyTransformer.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/JMSMessagePropertyTransformer.java
@@ -35,7 +35,7 @@ import java.util.ArrayList;
  * To configure this transformer, set the -p field of the adaptor to the
  * following (surrounded with double quotes):
  * <code>
- * <propertyNames> [-d <delimiter>] [-r <requiredPropertyNames>]
+ * &lt;propertyNames&gt; [-d &lt;delimiter&gt;] [-r &lt;requiredPropertyNames&gt;]
  * </code>
  * <ul>
  * <li><code>propertyNames</code> - Comma-separated list of JMS properties.</li>
@@ -107,9 +107,9 @@ public class JMSMessagePropertyTransformer implements JMSMessageTransformer {
    * all of the configured message properties are not found, returns null.
    * <P>
    * The could be enhanced to support the concept of optional/required properties.
-   * @param message
-   * @return
-   * @throws JMSException
+   * @param message is data to be transported
+   * @return byte array
+   * @throws JMSException if problem transforming data
    */
   public byte[] transform(Message message) throws JMSException {
 

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/JMSMessageTransformer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/JMSMessageTransformer.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/JMSMessageTransformer.java
index de2328f..bf9056c 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/JMSMessageTransformer.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/JMSMessageTransformer.java
@@ -33,7 +33,7 @@ public interface JMSMessageTransformer {
    * passed. If they weren't, this method will never be called.
    *
    * @param args Arguments needed to configur the transformer.
-   * @return
+   * @return adaptor id
    */
   public String parseArgs(String args);
 
@@ -43,7 +43,7 @@ public interface JMSMessageTransformer {
    *
    * @param message JMS message received by a JMS Adaptor.
    * @return the bytes that should be bound to the Chukwa chunk.
-   * @throws JMSException
+   * @throws JMSException if there is a problem processing the message
    */
   public byte[] transform(Message message) throws JMSException;
 }

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/AdaptorResetThread.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/AdaptorResetThread.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/AdaptorResetThread.java
index 88ba9bc..85526f8 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/AdaptorResetThread.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/AdaptorResetThread.java
@@ -63,7 +63,7 @@ public class AdaptorResetThread extends Thread {
   
   /**
    * Resets all adaptors with outstanding data more than timeSinceLastCommit old.
-   * @param timeSinceLastCommit
+   * @param timeSinceLastCommit is milliseconds since the last checkpoint
    * @return the number of reset adaptors
    */
   public int resetTimedOutAdaptors(int timeSinceLastCommit) {

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/AgentControlSocketListener.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/AgentControlSocketListener.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/AgentControlSocketListener.java
index d024180..faf39f9 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/AgentControlSocketListener.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/AgentControlSocketListener.java
@@ -205,8 +205,7 @@ public class AgentControlSocketListener extends Thread {
 
   /**
    * Initializes listener, but does not bind to socket.
-   * 
-   * @param a the agent to control
+   * @param agent the agent to control
    */
   public AgentControlSocketListener(ChukwaAgent agent) {
 

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/ChukwaAgent.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/ChukwaAgent.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/ChukwaAgent.java
index 0d201e7..be619f3 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/ChukwaAgent.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/ChukwaAgent.java
@@ -256,8 +256,8 @@ public class ChukwaAgent implements AdaptorManager {
   }
 
   /**
-   * @param args
-   * @throws AdaptorException
+   * @param args is command line arguments
+   * @throws AdaptorException if error registering adaptors
    */
   public static void main(String[] args) throws AdaptorException {
 
@@ -604,7 +604,7 @@ public class ChukwaAgent implements AdaptorManager {
 /**
  * Expose the adaptor list.  Keys are adaptor ID numbers, values are the 
  * adaptor status strings.
- * @return
+ * @return adaptor list
  */
   public Map<String, String> getAdaptorList() {
     Map<String, String> adaptors = new HashMap<String, String>(adaptorsByName.size());
@@ -700,8 +700,9 @@ public class ChukwaAgent implements AdaptorManager {
   /**
    * Triggers agent shutdown. For now, this method doesn't shut down adaptors
    * explicitly. It probably should.
+   * @param force sets flag to exit forcefully
    */
-  public void shutdown(boolean exit) {
+  public void shutdown(boolean force) {
     controlSock.shutdown(); // make sure we don't get new requests
 
     if (statsCollector != null) {
@@ -739,7 +740,7 @@ public class ChukwaAgent implements AdaptorManager {
     adaptorPositions.clear();
     adaptorStatsManager.clear();
     agent.stop();
-    if (exit)
+    if (force)
       return;
   }
 

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/metrics/AgentMetrics.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/metrics/AgentMetrics.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/metrics/AgentMetrics.java
index 34bd2ed..a161bc7 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/metrics/AgentMetrics.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/metrics/AgentMetrics.java
@@ -42,7 +42,10 @@ public class AgentMetrics implements Updater {
   public MetricsTimeVaryingInt removedAdaptor =
     new MetricsTimeVaryingInt("removedAdaptor", registry,"number of removed adaptor");
   
-  /** Creates a new instance of AgentMetrics */
+  /** Creates a new instance of AgentMetrics 
+   * @param processName is jvm name of agent process
+   * @param recordName is mbean record name
+   **/
   public AgentMetrics(String processName, String recordName) {
       MetricsContext context = MetricsUtil.getContext(processName);
       metricsRecord = MetricsUtil.createRecord(context, recordName);

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/metrics/ChunkQueueMetrics.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/metrics/ChunkQueueMetrics.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/metrics/ChunkQueueMetrics.java
index 930358f..813ffe7 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/metrics/ChunkQueueMetrics.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/metrics/ChunkQueueMetrics.java
@@ -50,7 +50,10 @@ public class ChunkQueueMetrics implements Updater {
     new MetricsIntValue("fullQueue", registry,"Queue is full");
   
   
-  /** Creates a new instance of QueueMetrics */
+  /** Creates a new instance of QueueMetrics 
+   * @param processName is jvm process name of the agent
+   * @param recordName is mbean record name
+   * */
   public ChunkQueueMetrics(String processName, String recordName) {
       MetricsContext context = MetricsUtil.getContext(processName);
       metricsRecord = MetricsUtil.createRecord(context, recordName);

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/rest/AdaptorController.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/rest/AdaptorController.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/rest/AdaptorController.java
index dc44975..7e4a696 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/rest/AdaptorController.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/rest/AdaptorController.java
@@ -57,6 +57,8 @@ public class AdaptorController {
 
   /**
    * Adds an adaptor to the agent and returns the adaptor info
+   * @param ac is adaptor configuration
+   * @return web status
    * 
    * @request.representation.example {@link Examples#CREATE_ADAPTOR_SAMPLE}
    * @response.representation.200.doc Adaptor has been registered
@@ -99,6 +101,7 @@ public class AdaptorController {
    * Remove an adaptor from the agent
    *
    * @param adaptorId id of adaptor to remove.
+   * @return web status
    * @response.representation.200.doc Delete adaptor by id
    * @response.representation.200.mediaType text/plain
    */
@@ -125,6 +128,7 @@ public class AdaptorController {
 
   /**
    * Get all adaptors
+   * @return web status
    * 
    * @response.representation.200.doc List all configured adaptors
    * @response.representation.200.mediaType application/json
@@ -140,6 +144,7 @@ public class AdaptorController {
    * Get a single adaptor
    * 
    * @param adaptorId id of the adaptor to return
+   * @return web status
    * @response.representation.200.doc Adaptor status and data transfer rate in 1, 5, 10 minutes averages
    * @response.representation.200.mediaType application/json
    * @response.representation.200.example {@link Examples#ADAPTOR_STATUS_SAMPLE}

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/controller/ChukwaAgentController.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/controller/ChukwaAgentController.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/controller/ChukwaAgentController.java
index 69e3566..7ab915f 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/controller/ChukwaAgentController.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/controller/ChukwaAgentController.java
@@ -119,7 +119,7 @@ public class ChukwaAgentController {
      * 
      * @return The id of the this {@link Adaptor}, assigned by the agent
      *         upon successful registration
-     * @throws IOException
+     * @throws IOException if there is a problem binding to the agent controller port
      */
     String register() throws IOException {
       Socket s = new Socket(hostname, portno);
@@ -230,6 +230,10 @@ public class ChukwaAgentController {
    * doesn't crash if it's attempt to register an adaptor fails. This call does
    * not retry a conection. for that use the overloaded version of this which
    * accepts a time interval and number of retries
+   * @param adaptorName is adaptor class name
+   * @param type is data type
+   * @param params is adaptor specific parameters
+   * @param offset is starting sequence id
    * 
    * @return the id number of the adaptor, generated by the agent
    */
@@ -246,6 +250,13 @@ public class ChukwaAgentController {
    * Registers a new adaptor. Makes no guarantee about success. On failure, to
    * connect to server, will retry <code>numRetries</code> times, every
    * <code>retryInterval</code> milliseconds.
+   * @param adaptorID is unique adaptor identifier
+   * @param adaptorName is adaptor class name
+   * @param type is user defined data type name
+   * @param params is adaptor specific configuration
+   * @param offset is starting sequence id
+   * @param numRetries is number of retries
+   * @param retryInterval is time between retries
    * 
    * @return the id number of the adaptor, generated by the agent
    */
@@ -399,9 +410,11 @@ public class ChukwaAgentController {
    * {@link ChukwaAgentController#pauseFile(String, String)} and
    * {@link ChukwaAgentController#resumeFile(String, String)} which will store
    * the adaptors metadata and re-use them to pick up where it left off.
+   * @param appType is user defined name for the data stream
    * 
-   * @param type the datatype associated with the file to pass through
    * @param filename of the file for the tail adaptor to start monitoring
+   * @param numRetries is number of retries
+   * @param retryInterval is time between retries
    * @return the id number of the adaptor, generated by the agent
    */
   public String addFile(String appType, String filename, long numRetries,
@@ -441,11 +454,11 @@ public class ChukwaAgentController {
    * but we store it state so that we can re-launch a new adaptor with the same
    * state later.
    * 
-   * @param appType
-   * @param filename
+   * @param appType is application type
+   * @param filename is file name suffix pattern
    * @return array of adaptorID numbers which have been created and assigned the
    *         state of the formerly paused adaptors
-   * @throws IOException
+   * @throws IOException if error pausing adaptors
    */
   public Collection<String> pauseFile(String appType, String filename)
       throws IOException {
@@ -478,12 +491,12 @@ public class ChukwaAgentController {
   /**
    * Resume all adaptors for this filename that have been paused
    * 
-   * @param appType the appType
+   * @param appType is application type
    * @param filename filename by which to lookup adaptors which are paused (and
    *        tailing this file)
    * @return an array of the new adaptor ID numbers which have resumed where the
    *         old adaptors left off
-   * @throws IOException
+   * @throws IOException if unable to resume all adaptors
    */
   public Collection<String> resumeFile(String appType, String filename)
       throws IOException {

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/ChukwaHttpSender.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/ChukwaHttpSender.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/ChukwaHttpSender.java
index 76727fe..49b393b 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/ChukwaHttpSender.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/ChukwaHttpSender.java
@@ -193,7 +193,7 @@ public class ChukwaHttpSender implements ChukwaSender {
   /**
    * Set up a list of connectors for this client to send {@link Chunk}s to
    * 
-   * @param collectors
+   * @param collectors is a list of collectors
    */
   public void setCollectors(Iterator<String> collectors) {
     this.collectors = collectors;
@@ -268,8 +268,8 @@ public class ChukwaHttpSender implements ChukwaSender {
    * @param method the data to push
    * @param expectedCommitResults the list
    * @return the list of committed chunks
-   * @throws IOException
-   * @throws InterruptedException
+   * @throws IOException if error writing
+   * @throws InterruptedException if shutdown has been initiated
    */
   public List<CommitListEntry> postAndParseResponse(PostMethod method, 
         List<CommitListEntry> expectedCommitResults)
@@ -280,9 +280,9 @@ public class ChukwaHttpSender implements ChukwaSender {
 
   /**
    *  Responsible for executing the supplied method on at least one collector
-   * @param method
-   * @return
-   * @throws InterruptedException
+   * @param method is HTTP method
+   * @return the list of committed statuses
+   * @throws InterruptedException if shutdown has been initiated
    * @throws IOException if no collector responds with an OK
    */
   protected List<String> reliablySend(HttpMethodBase method, String pathSuffix) throws InterruptedException, IOException {

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/ChukwaSender.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/ChukwaSender.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/ChukwaSender.java
index 0a46ea1..d816aec 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/ChukwaSender.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/ChukwaSender.java
@@ -34,6 +34,7 @@ public interface ChukwaSender {
    * @param chunksToSend a list of chunks to commit
    * @return the list of committed chunks
    * @throws InterruptedException if interrupted while trying to send
+   * @throws java.io.IOException when writing fails
    */
   public List<CommitListEntry> send(List<Chunk> chunksToSend)
       throws InterruptedException, java.io.IOException;

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/RetryListOfCollectors.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/RetryListOfCollectors.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/RetryListOfCollectors.java
index c636ad2..25386bb 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/RetryListOfCollectors.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/RetryListOfCollectors.java
@@ -95,8 +95,8 @@ public class RetryListOfCollectors implements Iterator<String>, Cloneable {
   /**
    * This is only used for debugging. Possibly it should sanitize urls the same way the other
    * constructor does.
-   * @param collectors
-   * @param maxRetryRateMs
+   * @param collectors is list of collector hostname
+   * @param conf is Chukwa configuration
    */
   public RetryListOfCollectors(final List<String> collectors, Configuration conf) {
     this(conf);

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/metrics/HttpSenderMetrics.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/metrics/HttpSenderMetrics.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/metrics/HttpSenderMetrics.java
index e223736..306c0bc 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/metrics/HttpSenderMetrics.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/metrics/HttpSenderMetrics.java
@@ -47,7 +47,10 @@ public class HttpSenderMetrics implements Updater {
   public MetricsTimeVaryingInt httpTimeOutException =
     new MetricsTimeVaryingInt("httpTimeOutException", registry,"number of HTTP TimeOutException");
   
-  /** Creates a new instance of HttpSenderMetrics */
+  /** Creates a new instance of HttpSenderMetrics 
+   * @param processName is jvm process name
+   * @param recordName is Hadoop metrics data type
+   * */
   public HttpSenderMetrics(String processName, String recordName) {
       MetricsContext context = MetricsUtil.getContext(processName);
       metricsRecord = MetricsUtil.createRecord(context, recordName);

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/test/FileTailerStressTest.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/test/FileTailerStressTest.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/test/FileTailerStressTest.java
index e00229a..4523103 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/test/FileTailerStressTest.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/test/FileTailerStressTest.java
@@ -73,7 +73,7 @@ public class FileTailerStressTest {
   static int FILES_TO_USE = 100;
 
   /**
-   * @param args
+   * @param args is command line parameters
    */
   public static void main(String[] args) {
     try {

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/ChukwaWriter.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/ChukwaWriter.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/ChukwaWriter.java
index 827c35a..86e040b 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/ChukwaWriter.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/ChukwaWriter.java
@@ -35,9 +35,9 @@ public interface ChukwaWriter {
    * COMMIT_PENDING should be returned if a writer has written data, but
    * this data may ultimately disappear. Contains a list of strings, format
    * unspecified, that agents can use to find out, eventually, if their data 
-   * has committed.  String <n> corresponds to the nth chunk passed to add().
+   * has committed.  String &lt;n&gt; corresponds to the nth chunk passed to add().
    * 
-   *  At present, the format is <sinkfilename> <offset>, 
+   *  At present, the format is &lt;sinkfilename&gt; &lt;offset&gt;, 
    *  where sinkfilename is the name of a sinkfile, without directory but with
    *  .done suffix, and offset is the last byte of the associated chunk.
    */
@@ -56,8 +56,8 @@ public interface ChukwaWriter {
   /**
    * Called once to initialize this writer.
    * 
-   * @param c
-   * @throws WriterException
+   * @param c is Chukwa configuration
+   * @throws WriterException if error writing data
    */
   public void init(Configuration c) throws WriterException;
 
@@ -67,16 +67,16 @@ public interface ChukwaWriter {
    * Subclasses may assume that init() will be called before any calls to
    * add(), and that add() won't be called after close().
    * 
-   * @param chunks
-   * @return
-   * @throws WriterException
+   * @param chunks is a list of data to send
+   * @return CommitStatus
+   * @throws WriterException if error writing data
    */
   public CommitStatus add(List<Chunk> chunks) throws WriterException;
 
   /**
    * Called once, indicating that the writer should close files and prepare
    * to exit.
-   * @throws WriterException
+   * @throws WriterException if error writing data
    */
   public void close() throws WriterException;
 

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/InMemoryWriter.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/InMemoryWriter.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/InMemoryWriter.java
index 4d9e2a0..c28f7a6 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/InMemoryWriter.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/InMemoryWriter.java
@@ -62,7 +62,7 @@ public class InMemoryWriter implements ChukwaWriter {
    * @param bytes amount to try to read
    * @param ms time to wait
    * @return a newly read-in chunk
-   * @throws IOException
+   * @throws IOException if error reading data
    */
   public Chunk readOutChunk(int bytes, int ms) throws IOException {
 

http://git-wip-us.apache.org/repos/asf/chukwa/blob/8011ff1f/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/SocketTeeWriter.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/SocketTeeWriter.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/SocketTeeWriter.java
index 88ec861..67ee90d 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/SocketTeeWriter.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/SocketTeeWriter.java
@@ -40,16 +40,16 @@ import org.apache.hadoop.chukwa.util.ExceptionUtil;
  * Defaults to 9094
  * 
  * Protocol is as follows:
- * Client ---> TeeWriter   "RAW | WRITABLE <filter>" 
+ * Client ---&gt; TeeWriter   "RAW | WRITABLE &lt;filter&gt;" 
  *                  as per DumpChunks.
  *                  
- * TeeWriter ---> Client "OK\n"                 
+ * TeeWriter ---&gt; Client "OK\n"                 
  *   In RAW mode               
- * TeeWriter ---> Client (length(int)  byte[length])*
+ * TeeWriter ---&gt; Client (length(int)  byte[length])*
  *              An indefinite sequence of length, followed by byte array.
  *              
  *  In Writable mode
- * TeeWriter ---> Client    (Chunk serialized as Writable)*
+ * TeeWriter ---&gt; Client    (Chunk serialized as Writable)*
  *              An indefinite sequence of serialized chunks
  *              
  *  In English: clients should connect and say either "RAW " or "WRITABLE "