Posted to commits@chukwa.apache.org by ey...@apache.org on 2011/12/21 21:16:21 UTC

svn commit: r1221864 [1/3] - in /incubator/chukwa/trunk: ./ src/main/java/org/apache/hadoop/chukwa/ src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/ src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/ src/main/java/org/apache...

Author: eyang
Date: Wed Dec 21 20:16:17 2011
New Revision: 1221864

URL: http://svn.apache.org/viewvc?rev=1221864&view=rev
Log:
CHUKWA-621. Implemented findbugs and pmd code analysis check. (Eric Yang)

Modified:
    incubator/chukwa/trunk/CHANGES.txt
    incubator/chukwa/trunk/pom.xml
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/ChukwaArchiveKey.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/ChunkImpl.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/DataNodeClientTraceMapper.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/FSMBuilder.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/FSMIntermedEntry.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/FSMIntermedEntryPartitioner.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/JobHistoryTaskDataMapper.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/TaskTrackerClientTraceMapper.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/database/Aggregator.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/database/DatabaseConfig.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/database/Macro.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/database/MetricsAggregation.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/ExecAdaptor.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/FileAdaptor.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/MemBuffered.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/SocketAdaptor.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/UDPAdaptor.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/WriteaheadBuffered.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/FileTailer.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/FileTailingAdaptor.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/RCheckFTAdaptor.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TerminatorThread.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/JMSMessagePropertyTransformer.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/sigar/SystemMetrics.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/AdaptorResetThread.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/AgentControlSocketListener.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/ChukwaAgent.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/rest/AdaptorController.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/collector/servlet/CommitCheckServlet.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/connector/http/HttpConnector.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/controller/ChukwaAgentController.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/controller/ClientFinalizer.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/AsyncAckSender.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/ChukwaHttpSender.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/RetryListOfCollectors.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/test/FilePerPostWriter.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/NullWriter.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/PipelineStageWriter.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/SocketTeeWriter.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/hbase/HBaseWriter.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/hbase/OutputCollector.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/localfs/LocalToRemoteHdfsMover.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/dataloader/DataLoaderFactory.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/dataloader/FSMDataLoader.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/dataloader/MetricDataLoader.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/dataloader/MetricDataLoaderPool.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datastore/ViewStore.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/archive/ChukwaArchiveBuilder.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/archive/ChukwaArchiveDataTypeOutputFormat.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/archive/ChukwaArchiveManager.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DailyChukwaRecordRolling.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DemuxManager.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/HourlyChukwaRecordRolling.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/PostProcessorManager.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/RecordMerger.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/HadoopMetricsProcessor.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobConfProcessor.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobSummary.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/SysLog.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/SystemMetrics.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Torque.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/reducer/ClientTrace.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DsDirectory.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/database/DatabaseDS.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/Chart.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/ClusterConfig.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/DatasetMapper.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/HiccWebServer.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/ImageSlicer.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/JSONLoader.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/OfflineTimeHandler.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/TimeHandler.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/Views.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/ViewsTag.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/Workspace.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/bean/Series.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/ChukwaInputFormat.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/jplugin/ChukwaMetrics.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/jplugin/ChukwaMetricsList.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/jplugin/JPluginAgent.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/ChukwaDailyRollingFileAppender.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/Log4JMetricsContext.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/Log4jMetricsSink.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/ErStreamHandler.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/LoaderServer.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/TorqueDataLoader.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/TorqueInfoProcessor.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/plugin/ExecPlugin.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/plugin/IPlugin.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/plugin/metrics/Exec.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/plugin/metrics/ExecHelper.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/plugin/nodeactivity/NodeActivityPlugin.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/plugin/pbsnode/PbsNodePlugin.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/rest/bean/CatalogBean.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/rest/bean/CategoryBean.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/rest/bean/ColumnBean.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/rest/bean/PagesBean.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/rest/bean/ParametersBean.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/rest/bean/ReturnCodeBean.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/rest/bean/ViewBean.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/rest/bean/WidgetBean.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/rest/resource/ClientTrace.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/rest/resource/UserResource.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/rest/resource/ViewContextResolver.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/rest/resource/ViewResource.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/rest/resource/WidgetContextResolver.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/tools/backfilling/BackfillingLoader.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/util/ClassUtils.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/util/ClusterConfig.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/util/ConstRateAdaptor.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/util/CopySequenceFile.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/util/DatabaseWriter.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/util/DumpChunks.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/util/Filter.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/util/MaxRateSender.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/util/PidFile.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/util/XssFilter.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/mapred/ChukwaJobTrackerInstrumentation.java
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java
    incubator/chukwa/trunk/src/site/site.xml
    incubator/chukwa/trunk/src/test/java/org/apache/hadoop/chukwa/datacollection/adaptor/TestExecAdaptor.java

Modified: incubator/chukwa/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/CHANGES.txt?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/CHANGES.txt (original)
+++ incubator/chukwa/trunk/CHANGES.txt Wed Dec 21 20:16:17 2011
@@ -110,6 +110,8 @@ Trunk (unreleased changes)
 
   BUG FIXES
 
+    CHUKWA-621. Implemented findbugs and pmd code analysis check. (Eric Yang)
+
     CHUKWA-619. Disabled trace method on Chukwa servlets. (Julio Conca via Eric Yang)
 
     CHUKWA-617. Fixed test case casting and race conditions. (Eric Yang)

Modified: incubator/chukwa/trunk/pom.xml
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/pom.xml?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/pom.xml (original)
+++ incubator/chukwa/trunk/pom.xml Wed Dec 21 20:16:17 2011
@@ -737,25 +737,29 @@
       <plugins>
         <plugin>
             <artifactId>maven-jxr-plugin</artifactId>
+            <version>2.3</version>
         </plugin>
         <plugin>
             <artifactId>maven-pmd-plugin</artifactId>
+            <version>2.6</version>
             <reportSets>
                 <reportSet>
                     <reports>
                         <report>pmd</report>
-                            <report>cpd</report>
-                        </reports>
-                    </reportSet>
+                        <report>cpd</report>
+                    </reports>
+                </reportSet>
             </reportSets>
             <configuration>
-                <targetJdk>1.5</targetJdk>
+                <targetJdk>1.6</targetJdk>
             </configuration>
         </plugin>
         <plugin>
             <groupId>org.codehaus.mojo</groupId>
             <artifactId>findbugs-maven-plugin</artifactId>
+            <version>2.3.3</version>
             <configuration>
+<!--                <onlyAnalyze>org.apache.hadoop.chukwa.datacollection.*</onlyAnalyze> -->
                 <threshold>Normal</threshold>
                 <effort>Max</effort>
             </configuration>

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/ChukwaArchiveKey.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/ChukwaArchiveKey.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/ChukwaArchiveKey.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/ChukwaArchiveKey.java Wed Dec 21 20:16:17 2011
@@ -225,6 +225,7 @@ public class ChukwaArchiveKey extends or
   }
 
   public Object clone() throws CloneNotSupportedException {
+    super.clone();
     ChukwaArchiveKey _rio_other = new ChukwaArchiveKey();
     _rio_other.timePartition = this.timePartition;
     _rio_other.dataType = this.dataType;

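The one-line addition above answers findbugs' CN_IDIOM_NO_SUPER_CALL warning, which fires when a Cloneable class builds its copy with a constructor instead of delegating to Object.clone(). A minimal sketch of the idiom the checker expects, with an illustrative class that is not the actual generated record type:

    public class ArchiveKey implements Cloneable {
      private long timePartition;
      private String dataType;

      @Override
      public Object clone() throws CloneNotSupportedException {
        // Object.clone() produces a field-by-field copy; primitives and
        // immutable references such as String need no extra handling.
        return super.clone();
      }
    }

The commit keeps ChukwaArchiveKey's hand-written field copying and simply adds the super.clone() call in front of it, which satisfies the checker without changing what the generated record code returns.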
Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/ChunkImpl.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/ChunkImpl.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/ChunkImpl.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/ChunkImpl.java Wed Dec 21 20:16:17 2011
@@ -74,7 +74,7 @@ public class ChunkImpl implements org.ap
     this.tags = dataFactory.getDefaultTags();
     this.streamName = streamName;
     this.dataType = dataType;
-    this.data = data;
+    this.data = (byte[]) data.clone();
     this.initiator = source;
   }
 
@@ -82,14 +82,14 @@ public class ChunkImpl implements org.ap
    * @see org.apache.hadoop.chukwa.Chunk#getData()
    */
   public byte[] getData() {
-    return data;
+    return data.clone();
   }
 
   /**
    * @see org.apache.hadoop.chukwa.Chunk#setData(byte[])
    */
   public void setData(byte[] logEvent) {
-    this.data = logEvent;
+    this.data = (byte[]) logEvent.clone();
   }
 
   /**
@@ -153,11 +153,11 @@ public class ChunkImpl implements org.ap
   public int[] getRecordOffsets() {
     if (recordEndOffsets == null)
       recordEndOffsets = new int[] { data.length - 1 };
-    return recordEndOffsets;
+    return recordEndOffsets.clone();
   }
 
   public void setRecordOffsets(int[] offsets) {
-    recordEndOffsets = offsets;
+    recordEndOffsets = (int[]) offsets.clone();
   }
 
   public String getDataType() {

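The ChunkImpl changes are the standard fix for findbugs' EI_EXPOSE_REP and EI_EXPOSE_REP2 warnings: storing a caller-supplied array, or returning the internal one, lets outside code mutate the chunk's state behind its back, so the accessors now copy in both directions. A minimal sketch of the pattern, using an illustrative class rather than the real Chunk interface:

    public class Record {
      private byte[] data;

      public Record(byte[] data) {
        // copy on the way in: later changes to the caller's array stay invisible here
        this.data = data.clone();
      }

      public byte[] getData() {
        // copy on the way out: callers cannot modify the internal buffer
        return data.clone();
      }

      public void setData(byte[] data) {
        this.data = data.clone();
      }
    }

The trade-off is an extra allocation per call, which matters for large chunks; a read-only view (for example java.nio.ByteBuffer.wrap(data).asReadOnlyBuffer()) avoids the copy but would change the Chunk API, so the copy is the conservative choice here.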
Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/DataNodeClientTraceMapper.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/DataNodeClientTraceMapper.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/DataNodeClientTraceMapper.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/DataNodeClientTraceMapper.java Wed Dec 21 20:16:17 2011
@@ -19,15 +19,8 @@
 package org.apache.hadoop.chukwa.analysis.salsa.fsm;
 
 import java.io.IOException;
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.util.Iterator;
-import java.util.TreeMap;
 import java.util.ArrayList;
-import java.util.TreeSet;
 import java.util.regex.*;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -37,12 +30,6 @@ import org.apache.hadoop.chukwa.extracti
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.util.*;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.filecache.DistributedCache;
 
 /**
  * Pluggable mapper for FSMBuilder
@@ -61,7 +48,6 @@ public class DataNodeClientTraceMapper 
 	protected static String FSM_CRK_ReduceType = FSMType.NAMES[FSMType.FILESYSTEM_FSM];
 	private final Pattern ipPattern =
     Pattern.compile(".*[a-zA-Z\\-_:\\/]([0-9]+\\.[0-9]+\\.[0-9]+\\.[0-9]+)[a-zA-Z0-9\\-_:\\/].*");
-  private final Pattern logMsgPattern = Pattern.compile("^(.{23}) ([A-Z]+) ([a-zA-Z0-9\\.]+): (.*)");
 
   public void map
     (ChukwaRecordKey key, ChukwaRecord val,
@@ -69,10 +55,6 @@ public class DataNodeClientTraceMapper 
 		 Reporter reporter)
     throws IOException 
   {
-		String newkey = new String("");
-		String key_trimmed = key.toString().trim();
-		String task_type;
-		FSMIntermedEntry this_rec = new FSMIntermedEntry(); 
 
 		/* Extract field names for checking */
 		String [] fieldNames = val.getFields();
@@ -92,7 +74,6 @@ public class DataNodeClientTraceMapper 
 		} 
 		// ignore "DataNode" type log messages; unsupported
 				
-		return;
   } // end of map()
 
   protected final int DEFAULT_READ_DURATION_MS = 10;
@@ -230,7 +211,6 @@ public class DataNodeClientTraceMapper 
     output.collect(new ChukwaRecordKey(FSM_CRK_ReduceType, crk_mid_string_start), start_rec);
     output.collect(new ChukwaRecordKey(FSM_CRK_ReduceType, crk_mid_string_end), end_rec);
     
-    return;
   }
 
 } // end of mapper class

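Most of the mapper edits in this commit, here and in FSMBuilder, JobHistoryTaskDataMapper, TaskTrackerClientTraceMapper, Heatmap and Swimlanes below, are mechanical responses to PMD's dead-code rules: UnusedImports, UnusedLocalVariable and UnnecessaryReturn. A short illustrative snippet (not Chukwa code) showing what each rule flags; the commit's fix is simply to delete such lines:

    import java.util.ArrayList;
    import java.util.TreeSet;      // UnusedImports: TreeSet is never referenced below

    public class DeadCodeExample {
      public void process(ArrayList<String> items) {
        String unused = "";        // UnusedLocalVariable: assigned but never read
        for (String s : items) {
          System.out.println(s);
        }
        return;                    // UnnecessaryReturn: last statement of a void method
      }
    }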
Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/FSMBuilder.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/FSMBuilder.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/FSMBuilder.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/FSMBuilder.java Wed Dec 21 20:16:17 2011
@@ -19,12 +19,8 @@
 package org.apache.hadoop.chukwa.analysis.salsa.fsm;
 
 import java.io.IOException;
-import java.io.DataInput;
-import java.io.DataOutput;
 import java.util.Iterator;
-import java.util.TreeMap;
 import java.util.ArrayList;
-import java.util.TreeSet;
 import java.util.Set;
 
 import org.apache.commons.logging.Log;
@@ -35,17 +31,9 @@ import org.apache.hadoop.chukwa.extracti
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.util.*;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.filecache.DistributedCache;
 import org.apache.hadoop.chukwa.extraction.demux.processor.ChukwaOutputCollector;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
 /**
  * FSM Builder
  * 
@@ -119,7 +107,6 @@ public class FSMBuilder extends Configur
       cr.add(JCDF_ID2, id2);
       cr.add(JCDF_EDGE_TIME, et);
       cr.add(JCDF_EDGE_VOL, ev);
-      return;
     }
 
     /**
@@ -144,7 +131,6 @@ public class FSMBuilder extends Configur
       cr.add(JCDF_ID2, id2);
       cr.add(JCDF_EDGE_TIME, et);
       cr.add(JCDF_EDGE_VOL, ev);
-      return;
     }
 
     /**
@@ -158,7 +144,7 @@ public class FSMBuilder extends Configur
       assert(fnl.contains("TIME_START"));
       assert(fnl.contains("COUNTER_BYTES"));
 
-      String mapid, redid, statename;
+      String mapid, redid;
       String id_parts[];
       
       id_parts = (cr.getValue("TASK_ID")).split("@");
@@ -179,7 +165,6 @@ public class FSMBuilder extends Configur
       cr.add(JCDF_ID2, id2);
       cr.add(JCDF_EDGE_TIME, et);
       cr.add(JCDF_EDGE_VOL, ev);
-      return;
     }    
 
     /**
@@ -204,7 +189,6 @@ public class FSMBuilder extends Configur
       cr.add(JCDF_ID2, id2);
       cr.add(JCDF_EDGE_TIME, et);
       cr.add(JCDF_EDGE_VOL, ev);
-      return;
     }
 
     /**
@@ -229,7 +213,6 @@ public class FSMBuilder extends Configur
       cr.add(JCDF_ID2, id2);
       cr.add(JCDF_EDGE_TIME, et);
       cr.add(JCDF_EDGE_VOL, ev);
-      return;
     }    
     
     /**
@@ -254,7 +237,6 @@ public class FSMBuilder extends Configur
       cr.add(JCDF_ID2, id2);
       cr.add(JCDF_EDGE_TIME, et);
       cr.add(JCDF_EDGE_VOL, ev);
-      return;
     }    
     
     protected void addStitchingFields_blockwrite
@@ -277,7 +259,6 @@ public class FSMBuilder extends Configur
       cr.add(JCDF_ID2, id2);
       cr.add(JCDF_EDGE_TIME, et);
       cr.add(JCDF_EDGE_VOL, ev);
-      return;
     }
 
     public void addStitchingFields
@@ -310,7 +291,6 @@ public class FSMBuilder extends Configur
         addStitchingFields_blockwrite(cr, fieldNamesList);
       } 
       // else add nothing
-      return;
     }
 
     public void reduce
@@ -325,7 +305,7 @@ public class FSMBuilder extends Configur
 			String newkey;
 			ArrayList<FSMIntermedEntry> ents = new ArrayList<FSMIntermedEntry>();
 			ArrayList<String> noncounters = new ArrayList<String>();
-			keystr.trim();
+			keystr = keystr.trim();
 			ChukwaRecord cr = new ChukwaRecord();
 			
 			for (int i = 0; i < NON_COUNTER_KEYS.length; i++) noncounters.add(NON_COUNTER_KEYS[i]);

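Alongside the dead-code removals, the last FSMBuilder hunk fixes a genuine bug that findbugs reports as RV_RETURN_VALUE_IGNORED: calling keystr.trim() on its own does nothing, because Java strings are immutable and trim() returns a new string. A self-contained sketch with made-up values:

    public class TrimExample {
      public static void main(String[] args) {
        String key = "  JobData/task_0001  ";

        key.trim();                          // return value discarded; key is unchanged
        System.out.println("[" + key + "]"); // prints [  JobData/task_0001  ]

        key = key.trim();                    // the fix: keep the returned string
        System.out.println("[" + key + "]"); // prints [JobData/task_0001]
      }
    }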
Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/FSMIntermedEntry.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/FSMIntermedEntry.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/FSMIntermedEntry.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/FSMIntermedEntry.java Wed Dec 21 20:16:17 2011
@@ -23,8 +23,6 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.util.Iterator;
 import java.util.TreeMap;
-import java.util.ArrayList;
-import java.util.TreeSet;
 import java.util.Set;
 
 import org.apache.hadoop.io.WritableComparable;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/FSMIntermedEntryPartitioner.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/FSMIntermedEntryPartitioner.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/FSMIntermedEntryPartitioner.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/FSMIntermedEntryPartitioner.java Wed Dec 21 20:16:17 2011
@@ -19,7 +19,6 @@
 package org.apache.hadoop.chukwa.analysis.salsa.fsm;
 
 import org.apache.hadoop.mapred.Partitioner;
-import org.apache.hadoop.mapred.JobConfigurable;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
 
@@ -35,7 +34,6 @@ public class FSMIntermedEntryPartitioner
 	
 	public void configure(JobConf job) {
 		// do nothing
-		return;
 	}
 
 }
\ No newline at end of file

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/JobHistoryTaskDataMapper.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/JobHistoryTaskDataMapper.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/JobHistoryTaskDataMapper.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/JobHistoryTaskDataMapper.java Wed Dec 21 20:16:17 2011
@@ -19,12 +19,7 @@
 package org.apache.hadoop.chukwa.analysis.salsa.fsm;
 
 import java.io.IOException;
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.util.Iterator;
-import java.util.TreeMap;
 import java.util.ArrayList;
-import java.util.TreeSet;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -34,12 +29,6 @@ import org.apache.hadoop.chukwa.extracti
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.util.*;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.filecache.DistributedCache;
 
 /**
  * Pluggable mapper for FSMBuilder
@@ -90,7 +79,6 @@ public class JobHistoryTaskDataMapper 
 		
 		assert(mapCounterDestNames.length == mapCounterNames.length);
 		
-		String currstr = new String();
 		for (int i = 0; i < mapCounterDestNames.length; i++) {
 			if (fieldNamesList.contains(mapCounterNames[i])) {
 				this_rec.add_info.put(mapCounterDestNames[i], val.getValue(mapCounterNames[i]));				
@@ -134,7 +122,6 @@ public class JobHistoryTaskDataMapper 
 		
 		assert(redCounterDestNames.length == redCounterNames.length);
 		
-		String currstr = new String();
 		for (int i = 0; i < redCounterDestNames.length; i++) {
 			if (fieldNamesList.contains(redCounterNames[i])) {
 				this_rec.add_info.put(redCounterDestNames[i], val.getValue(redCounterNames[i]));				
@@ -152,7 +139,6 @@ public class JobHistoryTaskDataMapper 
 		 Reporter reporter)
     throws IOException 
   {
-		String newkey = new String("");
 		String task_type;
 		FSMIntermedEntry this_rec = new FSMIntermedEntry(); 
 		boolean add_record = true;
@@ -170,10 +156,6 @@ public class JobHistoryTaskDataMapper 
 			task_type = val.getValue("TASK_TYPE"); 
 			if (!task_type.equals("MAP") && !task_type.equals("REDUCE")) {
 				return; // do nothing
-			} else {
-				// newkey = newkey.concat(task_type);
-				// newkey = newkey.concat("@");
-				// newkey = newkey.concat(val.getValue("TASK_ATTEMPT_ID"));
 			} 
 		} 
 
@@ -222,7 +204,6 @@ public class JobHistoryTaskDataMapper 
 		this_rec.state_name = new String(this_rec.state_mapred.toString());
 		this_rec.identifier = new String(val.getValue("TASK_ATTEMPT_ID"));
 		this_rec.generateUniqueID();
-		newkey = new String(this_rec.getUniqueID());
 		
 		// Extract hostname from tracker name (if present), or directly fill from hostname (<= 0.18)
 		if (fieldNamesList.contains("HOSTNAME")) {
@@ -281,7 +262,6 @@ public class JobHistoryTaskDataMapper 
 		  output.collect(new ChukwaRecordKey(FSM_CRK_ReduceType,this_rec.getUniqueID()),this_rec); 
 	  }
 		
-		return;
   } // end of map()
 
 	protected boolean expandReduceStart
@@ -291,7 +271,6 @@ public class JobHistoryTaskDataMapper 
 			throws IOException
 	{
 		FSMIntermedEntry redshuf_start_rec = null;
-		String newkey = new String("");
 		
 		try {
 			redshuf_start_rec = this_rec.clone();

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/TaskTrackerClientTraceMapper.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/TaskTrackerClientTraceMapper.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/TaskTrackerClientTraceMapper.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/TaskTrackerClientTraceMapper.java Wed Dec 21 20:16:17 2011
@@ -19,15 +19,8 @@
 package org.apache.hadoop.chukwa.analysis.salsa.fsm;
 
 import java.io.IOException;
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.util.Iterator;
-import java.util.TreeMap;
 import java.util.ArrayList;
-import java.util.TreeSet;
 import java.util.regex.*;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
 import java.util.Random;
 
 import org.apache.commons.logging.Log;
@@ -38,12 +31,6 @@ import org.apache.hadoop.chukwa.extracti
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.util.*;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.filecache.DistributedCache;
 
 /**
  * Pluggable mapper for FSMBuilder
@@ -69,10 +56,6 @@ public class TaskTrackerClientTraceMappe
 		 Reporter reporter)
     throws IOException 
   {
-		String newkey = new String("");
-		String key_trimmed = key.toString().trim();
-		String task_type;
-		FSMIntermedEntry this_rec = new FSMIntermedEntry(); 
 
 		/* Extract field names for checking */
 		String [] fieldNames = val.getFields();
@@ -90,7 +73,6 @@ public class TaskTrackerClientTraceMappe
 		    parseClientTraceDetailed(key, val, output, reporter, fieldNamesList);
 	    } // pick up only mapreduce operations
 		} 
-		return;
   } // end of map()
 
   protected final int DEFAULT_SHUFFLE_DURATION_MS = 10;
@@ -215,7 +197,6 @@ public class TaskTrackerClientTraceMappe
     output.collect(new ChukwaRecordKey(FSM_CRK_ReduceType, crk_mid_string_start), start_rec);
     output.collect(new ChukwaRecordKey(FSM_CRK_ReduceType, crk_mid_string_end), end_rec);
     
-    return;
   }
 
 } // end of mapper class

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java Wed Dec 21 20:16:17 2011
@@ -19,7 +19,6 @@
 package org.apache.hadoop.chukwa.analysis.salsa.visualization;
 
 import prefuse.data.io.sql.*;
-import prefuse.data.Table;
 import prefuse.data.expression.parser.*;
 import prefuse.data.expression.*;
 import prefuse.data.column.*;
@@ -41,16 +40,12 @@ import org.apache.hadoop.chukwa.database
 import org.apache.hadoop.chukwa.util.XssFilter;
 
 import javax.servlet.http.*;
-import javax.swing.BorderFactory;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
 import java.sql.*;
 import java.util.*;
-import java.text.NumberFormat;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
 
 import java.awt.Font;
 import java.awt.geom.Rectangle2D;
@@ -371,9 +366,6 @@ public class Heatmap {
     legend_labels_table.setString(1,"label","Src. Hosts");
     legend_labels_table.setString(2,"label","Dest. Hosts");
     
-    float start_x = LEGEND_X_OFFSET;
-    float start_y = LEGEND_Y_OFFSET + BORDER[1] + (BOXWIDTH/2);    
-    float incr = this.BOXWIDTH;
     VisualTable legend_labels_table_viz = this.viz.addTable(addinfogroup, legend_labels_table);
 
     legend_labels_table_viz.setFloat(0, VisualItem.X, this.SIZE_X/2);
@@ -418,7 +410,7 @@ public class Heatmap {
     HeatmapData hd = this.setupDataTable();
 
     // setup bounds
-    int width, realwidth;
+    int width;
     if (SIZE_X-BORDER[0]-BORDER[2] < SIZE_Y-BORDER[1]-BORDER[3]) {
       BOXWIDTH = (SIZE_X-BORDER[0]-BORDER[2]) / hd.num_hosts;
     } else {
@@ -486,7 +478,7 @@ public class Heatmap {
     int statlen = stat.length;
     long [] rowSums = new long[statlen];
     int [] permute = new int[statlen];
-    int i,j,k;
+    int i,j;
 
     // initialize permutation
     for (i = 0; i < statlen; i++) {
@@ -585,7 +577,6 @@ public class Heatmap {
       int col = rmeta.getColumnCount();
       while (rs.next()) {
         HashMap<String, Object> event = new HashMap<String, Object>();
-        long event_time=0;
         for(int i=1;i<=col;i++) {
           if(rmeta.getColumnType(i)==java.sql.Types.TIMESTAMP) {
             event.put(rmeta.getColumnName(i),rs.getTimestamp(i).getTime());
@@ -603,7 +594,6 @@ public class Heatmap {
     } finally {
       dbw.close();
     }    
-    SimpleDateFormat format = new SimpleDateFormat("MMM dd yyyy HH:mm:ss");
 
     log.info(events.size() + " results returned.");
 
@@ -646,7 +636,6 @@ public class Heatmap {
         end=(Long)event.get("finish_time");
         start_millis = Integer.parseInt(((String)event.get("start_time_millis")));
         end_millis = Integer.parseInt(((String)event.get("finish_time_millis")));      
-        String cell = (String) event.get("state_name");      
         String this_host = (String) event.get("hostname");
         String other_host = (String) event.get("other_host");
         int this_host_idx = host_indices.get(this_host).intValue();
@@ -662,7 +651,6 @@ public class Heatmap {
         end=(Long)event.get("finish_time");
         start_millis = Integer.parseInt(((String)event.get("start_time_millis")));
         end_millis = Integer.parseInt(((String)event.get("finish_time_millis")));      
-        String cell = (String) event.get("state_name");      
         String this_host = (String) event.get("hostname");
         String other_host = (String) event.get("other_host");
         int this_host_idx = host_indices.get(this_host).intValue();
@@ -686,7 +674,6 @@ public class Heatmap {
         end=(Long)event.get("finish_time");
         start_millis = Integer.parseInt(((String)event.get("start_time_millis")));
         end_millis = Integer.parseInt(((String)event.get("finish_time_millis")));      
-        String cell = (String) event.get("state_name");      
         String this_host = (String) event.get("hostname");
         String other_host = (String) event.get("other_host");
         int this_host_idx = host_indices.get(this_host).intValue();
@@ -710,7 +697,6 @@ public class Heatmap {
         end=(Long)event.get("finish_time");
         start_millis = Integer.parseInt(((String)event.get("start_time_millis")));
         end_millis = Integer.parseInt(((String)event.get("finish_time_millis")));      
-        String cell = (String) event.get("state_name");      
         String this_host = (String) event.get("hostname");
         String other_host = (String) event.get("other_host");
         int this_host_idx = host_indices.get(this_host).intValue();
@@ -728,7 +714,6 @@ public class Heatmap {
         end=(Long)event.get("finish_time");
         start_millis = Integer.parseInt(((String)event.get("start_time_millis")));
         end_millis = Integer.parseInt(((String)event.get("finish_time_millis")));      
-        String cell = (String) event.get("state_name");      
         String this_host = (String) event.get("hostname");
         String other_host = (String) event.get("other_host");
         int this_host_idx = host_indices.get(this_host).intValue();

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java Wed Dec 21 20:16:17 2011
@@ -19,7 +19,6 @@
 package org.apache.hadoop.chukwa.analysis.salsa.visualization;
 
 import prefuse.data.io.sql.*;
-import prefuse.data.Table;
 import prefuse.data.expression.parser.*;
 import prefuse.data.expression.*;
 import prefuse.data.column.*;
@@ -45,12 +44,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
 import javax.servlet.http.*;
-import javax.swing.BorderFactory;
 
 import java.sql.*;
 import java.util.*;
-import java.text.NumberFormat;
-import java.text.DateFormat;
 
 import java.awt.Font;
 import java.awt.geom.Rectangle2D;
@@ -213,10 +209,8 @@ public class Swimlanes {
       while (state_iter.hasNext()) {
         curr_state = state_iter.next();
         
-        if (this.collate_reduces) {
-          if (curr_state.equals("reduce_reducer") || curr_state.equals("reduce_sort")) {
-            continue;
-          }
+        if (this.collate_reduces && ((curr_state.equals("reduce_reducer") || curr_state.equals("reduce_sort")))) {
+          continue;
         }
         rownumiter = this.plot_tab.rows(
           (Predicate) ExpressionParser.parse("[state_name] == '"+curr_state+"'")
@@ -243,10 +237,7 @@ public class Swimlanes {
     
     public void groupByStartTime() {
       int counter, rownum;
-      int rowcount = this.plot_tab.getRowCount();
-      HashSet<String> states = new HashSet<String>();
       String curr_state = null;
-      Iterator<String> state_iter;
       IntIterator rownumiter;
      
       rownumiter = this.plot_tab.rowsSortedBy(START_FIELD_NAME, true);
@@ -271,10 +262,7 @@ public class Swimlanes {
     
     public void groupByEndTime() {
       int counter, rownum;
-      int rowcount = this.plot_tab.getRowCount();
-      HashSet<String> states = new HashSet<String>();
       String curr_state = null;
-      Iterator<String> state_iter;
       IntIterator rownumiter;
      
       rownumiter = this.plot_tab.rowsSortedBy(END_FIELD_NAME, true);
@@ -396,9 +384,7 @@ public class Swimlanes {
       SwimlanesStatePalette pal = new SwimlanesStatePalette();
       
       Iterator curr_group_items = this.m_vis.items(this.m_group);
-      
-      int i = 0;
-      
+          
       while (curr_group_items.hasNext()) {
         item = (VisualItem) curr_group_items.next();
         
@@ -416,7 +402,6 @@ public class Swimlanes {
 
         item.set(VisualItem.POLYGON,coords);
         item.setStrokeColor(pal.getColour(item.getString("state_name")));
-        i++;
       }
     }    
   } // SwimlanesStateAction
@@ -725,7 +710,6 @@ public class Swimlanes {
     shapes_table.addColumn(VisualItem.Y,float.class);
     
     Table legend_labels_table = new Table();
-    Table legend_squares_table = new Table();
     legend_labels_table.addColumn("label",String.class);
     
     // add labels
@@ -884,7 +868,6 @@ public class Swimlanes {
     
     Table rs_tab = null;    
     DatabaseDataSource dds; 
-    DefaultSQLDataHandler dh = new DefaultSQLDataHandler();
 
     log.debug("Query: " + query);
     // execute query

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/database/Aggregator.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/database/Aggregator.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/database/Aggregator.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/database/Aggregator.java Wed Dec 21 20:16:17 2011
@@ -22,24 +22,13 @@ import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileReader;
 import java.io.IOException;
-import java.sql.ResultSet;
-import java.sql.ResultSetMetaData;
 import java.text.ParsePosition;
 import java.text.SimpleDateFormat;
-import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.Date;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-import java.sql.DatabaseMetaData;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.chukwa.inputtools.mdl.DataConfig;
 import org.apache.hadoop.chukwa.util.DatabaseWriter;
-import org.apache.hadoop.chukwa.util.ExceptionUtil;
-import org.apache.hadoop.chukwa.util.PidFile;
 
 @SuppressWarnings("unused")
 public class Aggregator {
@@ -125,10 +114,9 @@ public class Aggregator {
     String[] query = queries.split("\n");
     while(startTime<=endTime) {
       for (int i = 0; i < query.length; i++) {
-        if (query[i].equals("")) {
-        } else if (query[i].indexOf("#") == 0) {
+        if (query[i].indexOf("#") == 0) {
           log.debug("skipping: " + query[i]);
-        } else {
+        } else if(!query[i].equals("")) {
           Aggregator dba = new Aggregator();
           long start = Calendar.getInstance().getTimeInMillis();
           try {

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/database/DatabaseConfig.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/database/DatabaseConfig.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/database/DatabaseConfig.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/database/DatabaseConfig.java Wed Dec 21 20:16:17 2011
@@ -22,17 +22,10 @@ package org.apache.hadoop.chukwa.databas
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import java.util.*;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 import java.io.File;
 import java.io.FilenameFilter;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-
 public class DatabaseConfig {
-    private static Log log = LogFactory.getLog(DatabaseConfig.class);
   private Configuration config = null;
   public final static long CENTURY = 36500 * 24 * 60 * 60 * 1000L;
   public final static long DECADE = 3650 * 24 * 60 * 60 * 1000L;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/database/Macro.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/database/Macro.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/database/Macro.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/database/Macro.java Wed Dec 21 20:16:17 2011
@@ -20,7 +20,6 @@ package org.apache.hadoop.chukwa.databas
 import java.sql.DatabaseMetaData;
 import java.sql.ResultSet;
 import java.sql.SQLException;
-import java.text.SimpleDateFormat;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.regex.Matcher;
@@ -152,7 +151,6 @@ public class Macro {
             }
             return meta;
         } else if(macro.indexOf("now")==0) {
-            SimpleDateFormat sdf = new SimpleDateFormat();
             return DatabaseWriter.formatTimeStamp(current);
         } else if(macro.intern()=="start".intern()) {
             return DatabaseWriter.formatTimeStamp(start);

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/database/MetricsAggregation.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/database/MetricsAggregation.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/database/MetricsAggregation.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/database/MetricsAggregation.java Wed Dec 21 20:16:17 2011
@@ -20,7 +20,6 @@ package org.apache.hadoop.chukwa.databas
 
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
-import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/ExecAdaptor.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/ExecAdaptor.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/ExecAdaptor.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/ExecAdaptor.java Wed Dec 21 20:16:17 2011
@@ -20,12 +20,12 @@ package org.apache.hadoop.chukwa.datacol
 
 
 import org.apache.hadoop.chukwa.ChunkImpl;
-import org.apache.hadoop.chukwa.datacollection.ChunkReceiver;
 import org.apache.hadoop.chukwa.inputtools.plugin.ExecPlugin;
 import org.apache.log4j.Logger;
 import org.apache.log4j.helpers.ISO8601DateFormat;
-import org.json.JSONException;
-import org.json.JSONObject;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.ParseException;
+
 import java.util.*;
 
 /**
@@ -68,7 +68,7 @@ public class ExecAdaptor extends Abstrac
       JSONObject o = exec.execute();
       try {
 
-        if (o.getInt("status") == exec.statusKO) {
+        if (((Integer) o.get("status")).intValue() == exec.statusKO) {
           deregisterAndStop();
           return;
         }
@@ -84,12 +84,12 @@ public class ExecAdaptor extends Abstrac
           result.append(" INFO org.apache.hadoop.chukwa.");
           result.append(type);
           result.append("= ");
-          result.append(o.getString("exitValue"));
+          result.append(o.get("exitValue"));
           result.append(": ");
-          result.append(o.getString("stdout"));
+          result.append((String) o.get("stdout"));
           data = result.toString().getBytes();
         } else {
-          String stdout = o.getString("stdout");
+          String stdout = (String) o.get("stdout");
           data = stdout.getBytes();
         }
 
@@ -110,10 +110,8 @@ public class ExecAdaptor extends Abstrac
         //We can't replay exec data, so we might as well commit to it now.
         control.reportCommit(ExecAdaptor.this, sendOffset);
         dest.add(c);
-      } catch (JSONException e) {
-        log.warn(e);
       } catch (InterruptedException e) {
-        ;
+        log.debug(e);
       } 
     }
   };

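ExecAdaptor switches from org.json to json-simple. json-simple's JSONObject is a plain java.util.Map: there are no typed getters such as getInt() or getString() and no checked JSONException, which is why the hunk casts the result of get() and drops the JSONException catch. A standalone sketch of reading values back out of a json-simple object; the keys mirror the ones ExecPlugin emits, but the literal JSON string is made up:

    import org.json.simple.JSONObject;
    import org.json.simple.parser.JSONParser;
    import org.json.simple.parser.ParseException;

    public class JsonSimpleExample {
      public static void main(String[] args) throws ParseException {
        JSONObject o = (JSONObject) new JSONParser().parse(
            "{\"status\": 0, \"stdout\": \"ok\"}");

        // Values parsed from text arrive as Long, Double, String or Boolean,
        // so a number read from parsed JSON needs a Long cast.
        long status = (Long) o.get("status");
        String stdout = (String) o.get("stdout");
        System.out.println("status=" + status + " stdout=" + stdout);
      }
    }

Objects assembled in code, as ExecPlugin does, keep whatever type was put into the map, which is why the adaptor above can cast its "status" value to Integer.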
Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/FileAdaptor.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/FileAdaptor.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/FileAdaptor.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/FileAdaptor.java Wed Dec 21 20:16:17 2011
@@ -27,6 +27,7 @@ import org.apache.hadoop.chukwa.ChunkImp
 import org.apache.hadoop.chukwa.datacollection.ChunkReceiver;
 import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 
@@ -183,7 +184,7 @@ public class FileAdaptor extends Abstrac
            try {
              reader.close();
            } catch (Exception e) {
-            // do nothing
+             log.debug(ExceptionUtil.getStackTrace(e));
           }
            reader = null;
          }
@@ -205,7 +206,7 @@ public class FileAdaptor extends Abstrac
       try {
         reader.close();
       } catch (Exception e) {
-       // do nothing
+        log.debug(ExceptionUtil.getStackTrace(e));
      }
       reader = null;
     } 

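FileAdaptor (and SocketAdaptor and FileTailingAdaptor further down) replaces empty catch blocks around close() with a debug-level log of the stack trace via Chukwa's ExceptionUtil helper, so shutdown failures are still tolerated but no longer invisible. A minimal, dependency-free sketch of the same pattern using only the JDK; the class, field and helper names are illustrative:

    import java.io.PrintWriter;
    import java.io.RandomAccessFile;
    import java.io.StringWriter;
    import java.util.logging.Level;
    import java.util.logging.Logger;

    public class QuietClose {
      private static final Logger log = Logger.getLogger(QuietClose.class.getName());

      static void closeQuietly(RandomAccessFile reader) {
        if (reader == null) {
          return;
        }
        try {
          reader.close();
        } catch (Exception e) {
          // log at a low level instead of swallowing the failure outright
          log.log(Level.FINE, stackTrace(e));
        }
      }

      // roughly what a getStackTrace(Throwable) helper does: render the trace as a String
      static String stackTrace(Throwable t) {
        StringWriter sw = new StringWriter();
        t.printStackTrace(new PrintWriter(sw));
        return sw.toString();
      }
    }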
Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/MemBuffered.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/MemBuffered.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/MemBuffered.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/MemBuffered.java Wed Dec 21 20:16:17 2011
@@ -21,7 +21,6 @@ import static org.apache.hadoop.chukwa.d
 import java.util.*;
 import org.apache.hadoop.chukwa.Chunk;
 import org.apache.hadoop.chukwa.datacollection.ChunkReceiver;
-import org.apache.hadoop.chukwa.datacollection.agent.AdaptorManager;
 
 public class MemBuffered extends AbstractWrapper {
   

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/SocketAdaptor.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/SocketAdaptor.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/SocketAdaptor.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/SocketAdaptor.java Wed Dec 21 20:16:17 2011
@@ -18,20 +18,15 @@
 package org.apache.hadoop.chukwa.datacollection.adaptor;
 
 import java.io.BufferedInputStream;
-import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.InterruptedIOException;
 import java.io.ObjectInputStream;
-import java.io.ObjectOutputStream;
-import java.io.OutputStream;
 import java.net.*;
-import java.util.ArrayList;
 
 import org.apache.hadoop.chukwa.*;
 import org.apache.hadoop.chukwa.util.ExceptionUtil;
 import org.apache.log4j.Logger;
 import org.apache.log4j.PatternLayout;
-import org.apache.log4j.spi.LoggerRepository;
 import org.apache.log4j.spi.LoggingEvent;
 
 /**
@@ -77,6 +72,7 @@ public class SocketAdaptor extends Abstr
       try {
         listener.close();
       } catch (IOException e) {
+        log.debug(ExceptionUtil.getStackTrace(e));
       }
     }
   }
@@ -130,8 +126,9 @@ public class SocketAdaptor extends Abstr
           try {
             server.close();
           } catch(InterruptedIOException e) {
-              Thread.currentThread().interrupt();
+            Thread.currentThread().interrupt();
           } catch(IOException ex) {
+            log.debug(ExceptionUtil.getStackTrace(ex));
           }
         }
       }
@@ -142,6 +139,7 @@ public class SocketAdaptor extends Abstr
         ois.close();
         server.close();
       } catch (IOException e) {
+        log.debug(ExceptionUtil.getStackTrace(e));
       }
     }
   }
@@ -176,7 +174,9 @@ public class SocketAdaptor extends Abstr
     try {
       running = false;
       disp.shutdown();
-    } catch(Exception e) {}
+    } catch(Exception e) {
+      log.debug(ExceptionUtil.getStackTrace(e));
+    }
     return 0;
   }
 

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/UDPAdaptor.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/UDPAdaptor.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/UDPAdaptor.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/UDPAdaptor.java Wed Dec 21 20:16:17 2011
@@ -21,7 +21,6 @@ import java.io.IOException;
 import java.net.*;
 import java.util.Arrays;
 import org.apache.hadoop.chukwa.*;
-import org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.FileTailingAdaptor;
 import org.apache.log4j.Logger;
 
 public class UDPAdaptor extends AbstractAdaptor {

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/WriteaheadBuffered.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/WriteaheadBuffered.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/WriteaheadBuffered.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/WriteaheadBuffered.java Wed Dec 21 20:16:17 2011
@@ -22,7 +22,6 @@ import java.io.*;
 import org.apache.hadoop.chukwa.Chunk;
 import org.apache.hadoop.chukwa.ChunkImpl;
 import org.apache.hadoop.chukwa.datacollection.ChunkReceiver;
-import org.apache.hadoop.chukwa.datacollection.agent.AdaptorManager;
 import org.apache.log4j.Logger;
 import static org.apache.hadoop.chukwa.datacollection.adaptor.AdaptorShutdownPolicy.*;
 

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/FileTailer.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/FileTailer.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/FileTailer.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/FileTailer.java Wed Dec 21 20:16:17 2011
@@ -21,9 +21,6 @@ package org.apache.hadoop.chukwa.datacol
 
 import java.util.List;
 import java.util.concurrent.CopyOnWriteArrayList;
-import org.apache.hadoop.chukwa.datacollection.ChunkQueue;
-import org.apache.hadoop.chukwa.datacollection.DataFactory;
-import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.log4j.Logger;
 

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/FileTailingAdaptor.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/FileTailingAdaptor.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/FileTailingAdaptor.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/FileTailingAdaptor.java Wed Dec 21 20:16:17 2011
@@ -18,18 +18,10 @@
 
 package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;
 
-
-import java.io.File;
 import java.io.IOException;
 import java.io.RandomAccessFile;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-import org.apache.hadoop.chukwa.ChunkImpl;
-import org.apache.hadoop.chukwa.datacollection.ChunkReceiver;
 import org.apache.hadoop.chukwa.datacollection.adaptor.*;
-import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.log4j.Logger;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 
 /**
  * An adaptor that repeatedly tails a specified file, sending the new bytes.
@@ -195,11 +187,11 @@ public class FileTailingAdaptor extends 
             }
             newReader =null;
           } catch (Throwable e) {
-            // do nothing.
+            log.debug(ExceptionUtil.getStackTrace(e));
           }
         }
       } catch (IOException e) {
-        // do nothing, if file doesn't exist.
+        log.debug(ExceptionUtil.getStackTrace(e));
       }
       if (len >= fileReadOffset) {
         if (offsetOfFirstByte > fileReadOffset) {

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/RCheckFTAdaptor.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/RCheckFTAdaptor.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/RCheckFTAdaptor.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/RCheckFTAdaptor.java Wed Dec 21 20:16:17 2011
@@ -24,9 +24,7 @@ import java.io.RandomAccessFile;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import java.util.Collections;
-import java.util.Queue;
 import java.util.LinkedList;
-import org.apache.hadoop.chukwa.datacollection.ChunkReceiver;
 
 /**
  * Checkpoint state:

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TerminatorThread.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TerminatorThread.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TerminatorThread.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TerminatorThread.java Wed Dec 21 20:16:17 2011
@@ -17,10 +17,8 @@
  */
 package org.apache.hadoop.chukwa.datacollection.adaptor.filetailer;
 
-
-import org.apache.hadoop.chukwa.datacollection.ChunkReceiver;
-import org.apache.hadoop.chukwa.datacollection.adaptor.FileAdaptor;
 import org.apache.log4j.Logger;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 
 public class TerminatorThread extends Thread {
   private static Logger log = Logger.getLogger(TerminatorThread.class);
@@ -69,7 +67,7 @@ public class TerminatorThread extends Th
     try {
       adaptor.reader.close();
     } catch (Throwable ex) {
-      // do nothing
+      log.debug(ExceptionUtil.getStackTrace(ex));
     }
   }
 }

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/JMSMessagePropertyTransformer.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/JMSMessagePropertyTransformer.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/JMSMessagePropertyTransformer.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/jms/JMSMessagePropertyTransformer.java Wed Dec 21 20:16:17 2011
@@ -121,11 +121,9 @@ public class JMSMessagePropertyTransform
       String value = transformValue(propertyName, propertyValue);
 
       // is a required value not found?
-      if (value == null) {
-        if (requiredPropertyNames == null ||
-            requiredPropertyNames.contains(propertyName)) {
-          return null;
-        }
+      if (value == null && (requiredPropertyNames == null ||
+          requiredPropertyNames.contains(propertyName))) {
+        return null;
       }
 
       if (valuesFound > 0) {

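The JMSMessagePropertyTransformer change collapses a nested conditional into a single guard; because && short-circuits, requiredPropertyNames is still only consulted when value is null, so the behavior is identical and only the pmd "collapsible if" warning is resolved. A small before/after sketch of the equivalence (variable names simplified):

    // Before: two nested if statements.
    if (value == null) {
      if (required == null || required.contains(name)) {
        return null;
      }
    }

    // After: one guard with the same short-circuit evaluation order.
    if (value == null && (required == null || required.contains(name))) {
      return null;
    }
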
Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/sigar/SystemMetrics.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/sigar/SystemMetrics.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/sigar/SystemMetrics.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/sigar/SystemMetrics.java Wed Dec 21 20:16:17 2011
@@ -37,8 +37,6 @@ import java.util.Timer;
 import org.apache.hadoop.chukwa.datacollection.adaptor.AbstractAdaptor;
 import org.apache.hadoop.chukwa.datacollection.adaptor.AdaptorException;
 import org.apache.hadoop.chukwa.datacollection.adaptor.AdaptorShutdownPolicy;
-import org.apache.hadoop.chukwa.datacollection.adaptor.jms.JMSMessageTransformer;
-import org.apache.hadoop.chukwa.datacollection.adaptor.jms.JMSTextMessageTransformer;
 import org.apache.log4j.Logger;
 
 public class SystemMetrics extends AbstractAdaptor {

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/AdaptorResetThread.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/AdaptorResetThread.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/AdaptorResetThread.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/AdaptorResetThread.java Wed Dec 21 20:16:17 2011
@@ -19,12 +19,10 @@ package org.apache.hadoop.chukwa.datacol
 
 import java.util.*;
 import org.apache.hadoop.conf.*;
-import org.apache.hadoop.chukwa.datacollection.DataFactory;
 import org.apache.hadoop.chukwa.datacollection.adaptor.Adaptor;
 import org.apache.hadoop.chukwa.datacollection.adaptor.AdaptorShutdownPolicy;
 import org.apache.hadoop.chukwa.datacollection.collector.servlet.CommitCheckServlet;
 import org.apache.hadoop.chukwa.datacollection.sender.AsyncAckSender;
-import org.apache.hadoop.chukwa.datacollection.writer.ChukwaWriter;
 import org.apache.hadoop.chukwa.datacollection.writer.SeqFileWriter;
 import org.apache.log4j.Logger;
 

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/AgentControlSocketListener.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/AgentControlSocketListener.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/AgentControlSocketListener.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/AgentControlSocketListener.java Wed Dec 21 20:16:17 2011
@@ -27,10 +27,10 @@ import java.io.InputStreamReader;
 import java.io.PrintStream;
 import java.net.*;
 import java.util.Map;
-import org.apache.hadoop.chukwa.datacollection.adaptor.Adaptor;
 import org.apache.hadoop.chukwa.datacollection.adaptor.AdaptorException;
 import org.apache.hadoop.chukwa.datacollection.adaptor.AdaptorShutdownPolicy;
 import org.apache.log4j.Logger;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 
 /**
  * Class to handle the agent control protocol. This is a simple line-oriented
@@ -88,7 +88,9 @@ public class AgentControlSocketListener 
         log.warn("a control connection broke", e);
         try {
           connection.close();
-        } catch(Exception ex) {}
+        } catch(Exception ex) {
+          log.debug(ExceptionUtil.getStackTrace(ex));
+        }
       }
     }
 
@@ -258,6 +260,7 @@ public class AgentControlSocketListener 
         s.close();
       s = null;
     } catch (IOException e) {
+      log.debug(ExceptionUtil.getStackTrace(e));
     } // ignore exception on close
   }
 

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/ChukwaAgent.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/ChukwaAgent.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/ChukwaAgent.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/ChukwaAgent.java Wed Dec 21 20:16:17 2011
@@ -48,6 +48,7 @@ import org.apache.hadoop.chukwa.datacoll
 import org.apache.hadoop.chukwa.datacollection.test.ConsoleOutConnector;
 import org.apache.hadoop.chukwa.util.AdaptorNamingUtils;
 import org.apache.hadoop.chukwa.util.DaemonWatcher;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.log4j.Logger;
@@ -90,7 +91,7 @@ public class ChukwaAgent implements Adap
     return agent;
   }
 
-  Configuration conf = null;
+  static Configuration conf = null;
   Connector connector = null;
 
   // doesn't need an equals(), comparator, etc
@@ -164,15 +165,14 @@ public class ChukwaAgent implements Adap
         System.exit(0);
       }
 
-      Configuration conf = readConfig();
+      conf = readConfig();
+      agent = new ChukwaAgent(conf);
       
-      ChukwaAgent localAgent = new ChukwaAgent(conf);
-
       if (agent.anotherAgentIsRunning()) {
         System.out
             .println("another agent is running (or port has been usurped). "
                 + "Bailing out now");
-        DaemonWatcher.bailout(-1);
+        throw new AlreadyRunningException();
       }
 
       int uriArgNumber = 0;
@@ -198,6 +198,7 @@ public class ChukwaAgent implements Adap
       System.out
           .println("agent started already on this machine with same portno;"
               + " bailing out");
+      DaemonWatcher.bailout(-1);
       System.exit(0); // better safe than sorry
     } catch (Exception e) {
       e.printStackTrace();
@@ -744,15 +745,14 @@ public class ChukwaAgent implements Adap
           .getAbsolutePath());
     conf.set("chukwaAgent.initial_adaptors", new File(chukwaConf,
         "initial_adaptors").getAbsolutePath());
-    
-    
     try { 
       Configuration chukwaAgentConf = new Configuration(false);
       chukwaAgentConf.addResource(new Path(agentConf.getAbsolutePath()));
       Checker.checkConf(new OptDictionary(new File(new File(chukwaHome, "share/chukwa/lib"), "agent.dict")),
           HSlurper.fromHConf(chukwaAgentConf));
-    } catch(Exception e) {e.printStackTrace();}
-    
+    } catch(Exception e) {
+      e.printStackTrace();
+    }    
     return conf;
   }
 
@@ -783,6 +783,7 @@ public class ChukwaAgent implements Adap
         if (needNewCheckpoint)
           writeCheckpoint(); // write a last checkpoint here, before stopping
       } catch (IOException e) {
+        log.debug(ExceptionUtil.getStackTrace(e));
       }
     }
     // adaptors

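In ChukwaAgent, the duplicate-agent branch now throws AlreadyRunningException rather than calling DaemonWatcher.bailout(-1) inline, and the bailout call moves next to the existing "agent started already" message; the conf field also becomes static and is assigned directly instead of being shadowed by a local. A rough sketch of the resulting control flow, under the assumption that the enclosing try/catch in main() is the one handling AlreadyRunningException (heavily simplified, not the full method):

    try {
      conf = readConfig();
      agent = new ChukwaAgent(conf);
      if (agent.anotherAgentIsRunning()) {
        // signal the condition; shutdown is handled in one place below
        throw new AlreadyRunningException();
      }
      // ... normal startup continues ...
    } catch (AlreadyRunningException e) {
      System.out.println("agent started already on this machine with same portno;"
          + " bailing out");
      DaemonWatcher.bailout(-1);
      System.exit(0); // better safe than sorry
    }
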
Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/rest/AdaptorController.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/rest/AdaptorController.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/rest/AdaptorController.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/rest/AdaptorController.java Wed Dec 21 20:16:17 2011
@@ -21,9 +21,12 @@ import org.apache.hadoop.chukwa.datacoll
 import org.apache.hadoop.chukwa.datacollection.adaptor.AdaptorException;
 import org.apache.hadoop.chukwa.datacollection.adaptor.Adaptor;
 import org.apache.hadoop.chukwa.datacollection.OffsetStatsManager;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 import org.apache.commons.lang.StringEscapeUtils;
 import org.json.JSONObject;
 import org.json.JSONException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 
 import javax.ws.rs.Path;
 import javax.ws.rs.GET;
@@ -70,6 +73,7 @@ import java.util.Map;
 public class AdaptorController {
 
   private static final DecimalFormat DECIMAL_FORMAT = new DecimalFormat();
+  private static final Log log = LogFactory.getLog(AdaptorController.class);
 
   static {
     DECIMAL_FORMAT.setMinimumFractionDigits(2);
@@ -345,7 +349,9 @@ public class AdaptorController {
   private static String fetchOptionalString(JSONObject json, String name) {
     try {
       return json.getString(name);
-    } catch (JSONException e) {}
+    } catch (JSONException e) {
+      log.debug(ExceptionUtil.getStackTrace(e));
+    }
     return null;
   }
 

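fetchOptionalString in AdaptorController treats a missing JSON field as a normal outcome: JSONObject.getString throws JSONException when the key is absent, the new commons-logging logger records it at debug, and the helper returns null. A standalone sketch of that optional-field pattern (class name illustrative):

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;
    import org.apache.hadoop.chukwa.util.ExceptionUtil;
    import org.json.JSONException;
    import org.json.JSONObject;

    public class OptionalField {
      private static final Log log = LogFactory.getLog(OptionalField.class);

      // Absent keys are expected; log the exception at debug and fall
      // back to null rather than propagating JSONException.
      static String fetchOptionalString(JSONObject json, String name) {
        try {
          return json.getString(name);
        } catch (JSONException e) {
          log.debug(ExceptionUtil.getStackTrace(e));
          return null;
        }
      }
    }
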
Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/collector/servlet/CommitCheckServlet.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/collector/servlet/CommitCheckServlet.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/collector/servlet/CommitCheckServlet.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/collector/servlet/CommitCheckServlet.java Wed Dec 21 20:16:17 2011
@@ -27,7 +27,6 @@ import javax.servlet.http.HttpServletReq
 import javax.servlet.http.HttpServletResponse;
 import org.apache.log4j.Logger;
 import java.util.*;
-import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
 import org.apache.hadoop.chukwa.datacollection.writer.SeqFileWriter;
 import org.apache.hadoop.chukwa.extraction.CHUKWA_CONSTANT;
 import org.apache.hadoop.chukwa.extraction.archive.SinkArchiver;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/connector/http/HttpConnector.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/connector/http/HttpConnector.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/connector/http/HttpConnector.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/connector/http/HttpConnector.java Wed Dec 21 20:16:17 2011
@@ -166,7 +166,6 @@ public class HttpConnector implements Co
           Thread.currentThread().interrupt();
           break;
         }
-        int toSend = newQueue.size();
         List<ChukwaHttpSender.CommitListEntry> results = connectorClient
             .send(newQueue);
         // checkpoint the chunks which were committed

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/controller/ChukwaAgentController.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/controller/ChukwaAgentController.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/controller/ChukwaAgentController.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/controller/ChukwaAgentController.java Wed Dec 21 20:16:17 2011
@@ -164,7 +164,7 @@ public class ChukwaAgentController {
           .getInputStream()));
       String resp = br.readLine();
       if (resp == null || !resp.startsWith("OK")) {
-        // error. What do we do?
+        log.error("adaptor unregister error, id: " + id);
       } else if (resp.startsWith("OK")) {
         String[] respSplit = resp.split(" ");
         String newOffset = respSplit[respSplit.length - 1];

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/controller/ClientFinalizer.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/controller/ClientFinalizer.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/controller/ClientFinalizer.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/controller/ClientFinalizer.java Wed Dec 21 20:16:17 2011
@@ -18,15 +18,11 @@
 
 package org.apache.hadoop.chukwa.datacollection.controller;
 
-import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
-import org.apache.log4j.Logger;
 import org.apache.log4j.helpers.LogLog;
 
 
 public class ClientFinalizer extends Thread {
   private ChukwaAgentController chukwaClient = null;
-  private String recordType = null;
-  private String fileName = null;
 
   public ClientFinalizer(ChukwaAgentController chukwaClient) {
     this.chukwaClient = chukwaClient;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/AsyncAckSender.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/AsyncAckSender.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/AsyncAckSender.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/AsyncAckSender.java Wed Dec 21 20:16:17 2011
@@ -17,12 +17,7 @@
  */
 package org.apache.hadoop.chukwa.datacollection.sender;
 
-import java.io.BufferedReader;
-import java.io.ByteArrayInputStream;
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import org.apache.hadoop.chukwa.Chunk;
 import org.apache.hadoop.chukwa.datacollection.DataFactory;
 import org.apache.hadoop.chukwa.datacollection.agent.*;
 import org.apache.hadoop.chukwa.datacollection.adaptor.Adaptor;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/ChukwaHttpSender.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/ChukwaHttpSender.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/ChukwaHttpSender.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/ChukwaHttpSender.java Wed Dec 21 20:16:17 2011
@@ -40,7 +40,6 @@ import org.apache.commons.httpclient.met
 import org.apache.commons.httpclient.params.HttpMethodParams;
 import org.apache.hadoop.chukwa.Chunk;
 import org.apache.hadoop.chukwa.datacollection.adaptor.Adaptor;
-import org.apache.hadoop.chukwa.datacollection.collector.servlet.ServletCollector;
 import org.apache.hadoop.chukwa.datacollection.sender.metrics.HttpSenderMetrics;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.DataOutputBuffer;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/RetryListOfCollectors.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/RetryListOfCollectors.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/RetryListOfCollectors.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/sender/RetryListOfCollectors.java Wed Dec 21 20:16:17 2011
@@ -20,7 +20,6 @@ package org.apache.hadoop.chukwa.datacol
 
 
 import java.io.*;
-import java.net.URL;
 import java.util.*;
 import org.apache.hadoop.conf.Configuration;
 

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/test/FilePerPostWriter.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/test/FilePerPostWriter.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/test/FilePerPostWriter.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/test/FilePerPostWriter.java Wed Dec 21 20:16:17 2011
@@ -21,10 +21,8 @@ import java.io.IOException;
 import java.net.URI;
 
 
-import java.util.Calendar;
 import java.util.List;
 import java.util.Timer;
-import java.util.concurrent.Semaphore;
 import java.util.concurrent.atomic.AtomicLong;
 import org.apache.hadoop.chukwa.ChukwaArchiveKey;
 import org.apache.hadoop.chukwa.Chunk;
@@ -37,7 +35,6 @@ import org.apache.hadoop.fs.FSDataOutput
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.SequenceFile;
-import org.mortbay.log.Log;
 
 /**
  * A writer that writes a file for each post. Intended ONLY for architectural

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/NullWriter.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/NullWriter.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/NullWriter.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/NullWriter.java Wed Dec 21 20:16:17 2011
@@ -20,6 +20,8 @@ package org.apache.hadoop.chukwa.datacol
 import java.util.List;
 import org.apache.hadoop.chukwa.Chunk;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 
 /**
  * Minimal writer; does nothing with data.
@@ -29,6 +31,7 @@ import org.apache.hadoop.conf.Configurat
  *
  */
 public class NullWriter implements ChukwaWriter {
+  private static final Logger log = Logger.getLogger(NullWriter.class);
   
   //in kb per sec
   int maxDataRate = Integer.MAX_VALUE;
@@ -41,19 +44,19 @@ public class NullWriter implements Chukw
         dataBytes +=c.getData().length;
       if(maxDataRate > 0)
         Thread.sleep(dataBytes / maxDataRate);
-    } catch(Exception e) {}
+    } catch(Exception e) {
+      log.debug(ExceptionUtil.getStackTrace(e));
+    }
     return COMMIT_OK;
   }
 
   @Override
   public void close() throws WriterException {
-    return;
   }
 
   @Override
   public void init(Configuration c) throws WriterException {
     maxDataRate = c.getInt(RATE_OPT_NAME, 0);
-    return;
   }
 
 }

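The NullWriter hunk adds a logger for the catch block and leaves the simple throttle in place: maxDataRate is read as KB per second, and since 1 KB/s is roughly one byte per millisecond, sleeping dataBytes / maxDataRate milliseconds keeps the accepted volume near the configured rate. A minimal sketch of that calculation on its own (names illustrative):

    // Approximate rate limiting: rateKBps is in KB/s (~1 byte/ms per KB/s),
    // so bytes / rateKBps is the pause in milliseconds needed to stay at
    // or below the target throughput. A rate of 0 disables throttling.
    static void throttle(long bytes, int rateKBps) throws InterruptedException {
      if (rateKBps > 0) {
        Thread.sleep(bytes / rateKBps);
      }
    }
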
Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/PipelineStageWriter.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/PipelineStageWriter.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/PipelineStageWriter.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/PipelineStageWriter.java Wed Dec 21 20:16:17 2011
@@ -19,7 +19,6 @@
 package org.apache.hadoop.chukwa.datacollection.writer;
 
 
-import java.util.ArrayList;
 import java.util.List;
 import org.apache.hadoop.chukwa.Chunk;
 import org.apache.hadoop.conf.Configuration;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/SocketTeeWriter.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/SocketTeeWriter.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/SocketTeeWriter.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/SocketTeeWriter.java Wed Dec 21 20:16:17 2011
@@ -22,15 +22,13 @@ import java.util.concurrent.BlockingQueu
 import java.util.concurrent.ArrayBlockingQueue;
 import java.util.regex.PatternSyntaxException;
 import org.apache.hadoop.chukwa.Chunk;
-import org.apache.hadoop.chukwa.util.DumpChunks;
 import org.apache.hadoop.chukwa.util.Filter;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.log4j.Logger;
-import org.mortbay.log.Log;
 import java.net.ServerSocket;
 import java.net.Socket;
-import java.net.SocketAddress;
 import java.io.*;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 
 /**
  * Effectively a "Tee" in the writer pipeline.
@@ -95,7 +93,7 @@ public class SocketTeeWriter extends Pip
           new Tee(sock);
         }
       } catch(IOException e) {
-        
+        log.debug(ExceptionUtil.getStackTrace(e)); 
       }
     }
     
@@ -105,7 +103,7 @@ public class SocketTeeWriter extends Pip
         s.close(); //to break out of run loop
         this.interrupt();
       } catch(IOException e) {
-        
+        log.debug(ExceptionUtil.getStackTrace(e)); 
       }
     }
   }
@@ -222,7 +220,9 @@ public class SocketTeeWriter extends Pip
       try {
         out.close();
         in.close();
-      } catch(Exception e) {}
+      } catch(Exception e) {
+        log.debug(ExceptionUtil.getStackTrace(e));
+      }
     }
 
     public void handle(Chunk c) {

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/hbase/HBaseWriter.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/hbase/HBaseWriter.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/hbase/HBaseWriter.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/hbase/HBaseWriter.java Wed Dec 21 20:16:17 2011
@@ -30,25 +30,17 @@ import org.apache.hadoop.chukwa.datacoll
 import org.apache.hadoop.chukwa.datacollection.writer.PipelineableWriter;
 import org.apache.hadoop.chukwa.datacollection.writer.WriterException;
 import org.apache.hadoop.chukwa.datacollection.writer.ChukwaWriter.CommitStatus;
-
 import org.apache.hadoop.chukwa.extraction.demux.processor.mapper.MapProcessor;
 import org.apache.hadoop.chukwa.extraction.demux.processor.mapper.MapProcessorFactory;
-
 import org.apache.hadoop.chukwa.util.ClassUtils;
 import org.apache.hadoop.chukwa.util.DaemonWatcher;
 import org.apache.hadoop.chukwa.util.ExceptionUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
-
 import org.apache.hadoop.hbase.client.HBaseAdmin;
-
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.HTablePool;
-
-import org.apache.hadoop.chukwa.datacollection.writer.hbase.OutputCollector;
-import org.apache.hadoop.chukwa.datacollection.writer.hbase.Reporter;
 import org.apache.hadoop.chukwa.datacollection.writer.hbase.Annotation.Table;
 import org.apache.hadoop.chukwa.datacollection.writer.hbase.Annotation.Tables;
 import org.apache.log4j.Logger;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/hbase/OutputCollector.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/hbase/OutputCollector.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/hbase/OutputCollector.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/hbase/OutputCollector.java Wed Dec 21 20:16:17 2011
@@ -20,15 +20,11 @@ package org.apache.hadoop.chukwa.datacol
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Calendar;
 import java.util.List;
-import java.util.TimeZone;
 
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Put;
-import org.apache.log4j.Logger;
 
 public class OutputCollector implements
     org.apache.hadoop.mapred.OutputCollector<ChukwaRecordKey, ChukwaRecord> {

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/localfs/LocalToRemoteHdfsMover.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/localfs/LocalToRemoteHdfsMover.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/localfs/LocalToRemoteHdfsMover.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/localfs/LocalToRemoteHdfsMover.java Wed Dec 21 20:16:17 2011
@@ -120,6 +120,7 @@ public class LocalToRemoteHdfsMover exte
         throw new RuntimeException("Cannot rename remote file, " + pRemoteFilePath + " to " + pFinalRemoteFilePath);
       }
     }catch(FileNotFoundException ex) {
+      log.debug("File not found: " + remoteFilePath);
       //do nothing since if the file is no longer there it's
       // because it has already been moved over by the cleanup task.
     }