You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@chukwa.apache.org by ey...@apache.org on 2011/12/21 21:16:21 UTC

svn commit: r1221864 [2/3] - in /incubator/chukwa/trunk: ./ src/main/java/org/apache/hadoop/chukwa/ src/main/java/org/apache/hadoop/chukwa/analysis/salsa/fsm/ src/main/java/org/apache/hadoop/chukwa/analysis/salsa/visualization/ src/main/java/org/apache...

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/dataloader/DataLoaderFactory.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/dataloader/DataLoaderFactory.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/dataloader/DataLoaderFactory.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/dataloader/DataLoaderFactory.java Wed Dec 21 20:16:17 2011
@@ -19,8 +19,6 @@
 package org.apache.hadoop.chukwa.dataloader;
 
 import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -31,8 +29,6 @@ public abstract class DataLoaderFactory 
   static FileSystem fs = null;
   protected FileStatus[] source = null;
 
-  private static Log log = LogFactory.getLog(DataLoaderFactory.class);
-
   public DataLoaderFactory() {
   }
 

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/dataloader/FSMDataLoader.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/dataloader/FSMDataLoader.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/dataloader/FSMDataLoader.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/dataloader/FSMDataLoader.java Wed Dec 21 20:16:17 2011
@@ -19,7 +19,6 @@ package org.apache.hadoop.chukwa.dataloa
 
 import java.io.File;
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.concurrent.CompletionService;
 import java.util.concurrent.ExecutorCompletionService;
@@ -31,7 +30,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.chukwa.analysis.salsa.fsm.FSMBuilder;
@@ -108,6 +106,7 @@ public class FSMDataLoader extends DataL
           outputPaths.add(outputPath);
           if(hasData) {
             int res = ToolRunner.run(fsmConf, new FSMBuilder(), args);
+            log.debug("Job Status: "+res);
           }
         }
       }
@@ -137,7 +136,6 @@ public class FSMDataLoader extends DataL
     } catch(Exception e) {
       log.error(ExceptionUtil.getStackTrace(e));
       throw new IOException();
-    } finally {
     }
   }
 

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/dataloader/MetricDataLoader.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/dataloader/MetricDataLoader.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/dataloader/MetricDataLoader.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/dataloader/MetricDataLoader.java Wed Dec 21 20:16:17 2011
@@ -19,10 +19,8 @@
 package org.apache.hadoop.chukwa.dataloader;
 
 import java.io.IOException;
-import java.net.URISyntaxException;
 import java.sql.Connection;
 import java.sql.ResultSet;
-import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
 import java.sql.Statement;
 import java.text.SimpleDateFormat;
@@ -262,6 +260,7 @@ public class MetricDataLoader implements
           priKeys = mdlConfig.get("report.db.primary.key." + recordType).split(
               ",");
         } catch (Exception nullException) {
+          log.debug(ExceptionUtil.getStackTrace(nullException));
         }
         for (String field : fields) {
           String keyName = escape(field.toLowerCase(), newSpace);
@@ -352,6 +351,7 @@ public class MetricDataLoader implements
             }
           } catch (Exception nullException) {
             // ignore if primary key is empty
+            log.debug(ExceptionUtil.getStackTrace(nullException));
           }
           // Map the hash objects to database table columns
           StringBuilder sqlValues = new StringBuilder();

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/dataloader/MetricDataLoaderPool.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/dataloader/MetricDataLoaderPool.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/dataloader/MetricDataLoaderPool.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/dataloader/MetricDataLoaderPool.java Wed Dec 21 20:16:17 2011
@@ -68,7 +68,6 @@ public class MetricDataLoaderPool extend
     } catch(Exception e) {
       log.error(ExceptionUtil.getStackTrace(e));
       throw new IOException();
-    } finally {
     }
   }
 
@@ -77,4 +76,4 @@ public class MetricDataLoaderPool extend
     executor.awaitTermination(30, TimeUnit.SECONDS);
     executor.shutdownNow();
   }
-}
\ No newline at end of file
+}

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java Wed Dec 21 20:16:17 2011
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.chukwa.datastore;
 
-import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -27,16 +26,13 @@ import java.util.concurrent.CopyOnWriteA
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.hadoop.chukwa.datacollection.writer.hbase.HBaseWriter;
 import org.apache.hadoop.chukwa.hicc.bean.Series;
 import org.apache.hadoop.chukwa.util.ExceptionUtil;
 
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.HTablePool;
@@ -47,7 +43,6 @@ import org.apache.hadoop.hbase.filter.Ro
 import org.apache.hadoop.hbase.filter.RegexStringComparator;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.log4j.Logger;
-import org.mortbay.log.Log;
 
 public class ChukwaHBaseStore {
   private static Configuration hconf = HBaseConfiguration.create();
@@ -83,7 +78,6 @@ public class ChukwaHBaseStore {
         scan.setFilter(rf);
       }
       ResultScanner results = table.getScanner(scan);
-      long step = startTime;
       Iterator<Result> it = results.iterator();
       // TODO: Apply discrete wavelet transformation to limit the output
       // size to 1000 data points for graphing optimization. (i.e jwave)
@@ -110,7 +104,6 @@ public class ChukwaHBaseStore {
     Set<String> familyNames = new CopyOnWriteArraySet<String>();
     try {
       HTableInterface table = pool.getTable(tableName);
-      Calendar c = Calendar.getInstance();
       Set<byte[]> families = table.getTableDescriptor().getFamiliesKeys();
       for(byte[] name : families) {
         familyNames.add(new String(name));

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datastore/ViewStore.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datastore/ViewStore.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datastore/ViewStore.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datastore/ViewStore.java Wed Dec 21 20:16:17 2011
@@ -38,9 +38,6 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.json.simple.JSONArray;
-import org.json.simple.JSONObject;
-import org.json.simple.JSONValue;
-import org.json.simple.parser.JSONParser;
 
 public class ViewStore {
   private String vid = null;
@@ -53,7 +50,6 @@ public class ViewStore {
   private static String publicViewPath = viewPath+File.separator+"public";
   private static String usersViewPath = viewPath+File.separator+"users";
   private static String PUBLIC = "public".intern();
-  private static String PRIVATE = "private".intern();
 
   public ViewStore() throws IllegalAccessException {
     ViewStore.config = HiccWebServer.getConfig();

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/archive/ChukwaArchiveBuilder.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/archive/ChukwaArchiveBuilder.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/archive/ChukwaArchiveBuilder.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/archive/ChukwaArchiveBuilder.java Wed Dec 21 20:16:17 2011
@@ -24,9 +24,6 @@ import java.util.Iterator;
 import org.apache.hadoop.chukwa.ChukwaArchiveKey;
 import org.apache.hadoop.chukwa.ChunkImpl;
 import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
-import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
-import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.FileInputFormat;
@@ -39,7 +36,6 @@ import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
-import org.apache.hadoop.mapred.lib.IdentityReducer;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.log4j.Logger;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/archive/ChukwaArchiveDataTypeOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/archive/ChukwaArchiveDataTypeOutputFormat.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/archive/ChukwaArchiveDataTypeOutputFormat.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/archive/ChukwaArchiveDataTypeOutputFormat.java Wed Dec 21 20:16:17 2011
@@ -24,7 +24,6 @@ import org.apache.hadoop.chukwa.ChukwaAr
 import org.apache.hadoop.chukwa.ChunkImpl;
 import org.apache.hadoop.chukwa.extraction.engine.RecordUtil;
 import org.apache.hadoop.mapred.lib.MultipleSequenceFileOutputFormat;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RecordWriter;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/archive/ChukwaArchiveManager.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/archive/ChukwaArchiveManager.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/archive/ChukwaArchiveManager.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/archive/ChukwaArchiveManager.java Wed Dec 21 20:16:17 2011
@@ -51,11 +51,6 @@ public class ChukwaArchiveManager implem
     init();
   }
 
-  public ChukwaArchiveManager(ChukwaConfiguration conf) throws Exception {
-    conf = new ChukwaConfiguration();
-    init();
-  }
-  
   protected void init() throws IOException, URISyntaxException {
     String fsName = conf.get(HDFS_DEFAULT_NAME_FIELD);
     fs = FileSystem.get(new URI(fsName), conf);

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DailyChukwaRecordRolling.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DailyChukwaRecordRolling.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DailyChukwaRecordRolling.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DailyChukwaRecordRolling.java Wed Dec 21 20:16:17 2011
@@ -29,7 +29,7 @@ import org.apache.hadoop.chukwa.conf.Chu
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
 import org.apache.hadoop.chukwa.util.DaemonWatcher;
-import org.apache.hadoop.chukwa.util.PidFile;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -269,6 +269,7 @@ public class DailyChukwaRecordRolling ex
       } // End Try workingDay =
         // Integer.parseInt(sdf.format(dayFS.getPath().getName()));
       catch (NumberFormatException e) { /* Not a standard Day directory skip */
+        log.debug(ExceptionUtil.getStackTrace(e));
       }
 
     } // for(FileStatus dayFS : daysFS)

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DemuxManager.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DemuxManager.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DemuxManager.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DemuxManager.java Wed Dec 21 20:16:17 2011
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.chukwa.extraction.demux;
 
-import java.io.File;
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -58,7 +57,6 @@ public class DemuxManager implements CHU
   
   protected SimpleDateFormat dayTextFormat = new java.text.SimpleDateFormat("yyyyMMdd");
   protected volatile boolean isRunning = true;
-  private final static String demuxPath = System.getenv("CHUKWA_HOME")+File.separator+"lib"+File.separator+"demux";
 
   final private static PathFilter DATA_SINK_FILTER = new PathFilter() {
     public boolean accept(Path file) {

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/HourlyChukwaRecordRolling.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/HourlyChukwaRecordRolling.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/HourlyChukwaRecordRolling.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/HourlyChukwaRecordRolling.java Wed Dec 21 20:16:17 2011
@@ -29,7 +29,6 @@ import org.apache.hadoop.chukwa.conf.Chu
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
 import org.apache.hadoop.chukwa.util.DaemonWatcher;
-import org.apache.hadoop.chukwa.util.PidFile;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/PostProcessorManager.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/PostProcessorManager.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/PostProcessorManager.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/PostProcessorManager.java Wed Dec 21 20:16:17 2011
@@ -25,7 +25,6 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
-import java.util.Collection;
 
 import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
 import org.apache.hadoop.chukwa.dataloader.DataLoaderFactory;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/RecordMerger.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/RecordMerger.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/RecordMerger.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/RecordMerger.java Wed Dec 21 20:16:17 2011
@@ -28,6 +28,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.log4j.Logger;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 
 public class RecordMerger extends Thread {
   static Logger log = Logger.getLogger(RecordMerger.class);
@@ -77,6 +78,7 @@ public class RecordMerger extends Thread
                                                  * Not an Hour or Minutes
                                                  * directory- Do nothing
                                                  */
+              log.debug(ExceptionUtil.getStackTrace(e));
             }
           }
         }

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/HadoopMetricsProcessor.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/HadoopMetricsProcessor.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/HadoopMetricsProcessor.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/HadoopMetricsProcessor.java Wed Dec 21 20:16:17 2011
@@ -92,7 +92,7 @@ public class HadoopMetricsProcessor exte
       idx = recordEntry.indexOf(' ', start);
       // String className = recordEntry.substring(start, idx-1);
       String body = recordEntry.substring(idx + 1);
-      body.replaceAll("\n", "");
+      body = body.replaceAll("\n", "");
       // log.info("record [" + recordEntry + "] body [" + body +"]");
       Date d = sdf.parse(dStr);
 

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobConfProcessor.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobConfProcessor.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobConfProcessor.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobConfProcessor.java Wed Dec 21 20:16:17 2011
@@ -104,7 +104,6 @@ public class JobConfProcessor extends Ab
             NodeList fields = prop.getChildNodes();
             String attr = null;
             String value = null;
-            boolean finalParameter = false;
             for (int j = 0; j < fields.getLength(); j++) {
                 Node fieldNode = fields.item(j);
                 if (!(fieldNode instanceof Element))
@@ -114,8 +113,6 @@ public class JobConfProcessor extends Ab
                     attr = ((Text)field.getFirstChild()).getData().trim();
                 if ("value".equals(field.getTagName()) && field.hasChildNodes())
                     value = ((Text)field.getFirstChild()).getData();
-                if ("final".equals(field.getTagName()) && field.hasChildNodes())
-                    finalParameter = "true".equals(((Text)field.getFirstChild()).getData());
             }
             
             // Ignore this parameter if it has already been marked as 'final'

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobSummary.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobSummary.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobSummary.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobSummary.java Wed Dec 21 20:16:17 2011
@@ -24,7 +24,6 @@ import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
 import java.util.Date;
-import java.util.Iterator;
 
 import org.apache.hadoop.chukwa.datacollection.writer.hbase.Annotation.Tables;
 import org.apache.hadoop.chukwa.datacollection.writer.hbase.Annotation.Table;
@@ -34,8 +33,6 @@ import org.apache.hadoop.chukwa.extracti
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.log4j.Logger;
-import org.json.simple.JSONObject;
-import org.json.simple.JSONValue;
 
 @Tables(annotations={
 @Table(name="Jobs",columnFamily="summary")
@@ -69,7 +66,7 @@ public class JobSummary extends Abstract
       idx = recordEntry.indexOf(' ', start);
       // String className = recordEntry.substring(start, idx-1);
       String body = recordEntry.substring(idx + 1);
-      body.replaceAll("\n", "");
+      body = body.replaceAll("\n", "");
       // log.info("record [" + recordEntry + "] body [" + body +"]");
       Date d = sdf.parse(dStr);
 

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/SysLog.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/SysLog.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/SysLog.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/SysLog.java Wed Dec 21 20:16:17 2011
@@ -54,7 +54,7 @@ public class SysLog extends AbstractProc
       start = idx + 1;
       idx = recordEntry.indexOf(' ', start);
       String body = recordEntry.substring(idx + 1);
-      body.replaceAll("\n", "");
+      body = body.replaceAll("\n", "");
 
       Calendar convertDate = Calendar.getInstance();
       Date d = sdf.parse(dStr);

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/SystemMetrics.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/SystemMetrics.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/SystemMetrics.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/SystemMetrics.java Wed Dec 21 20:16:17 2011
@@ -23,7 +23,6 @@
 package org.apache.hadoop.chukwa.extraction.demux.processor.mapper;
 
 import java.util.Calendar;
-import java.util.HashMap;
 import java.util.Iterator;
 import java.util.TimeZone;
 

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Torque.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Torque.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Torque.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Torque.java Wed Dec 21 20:16:17 2011
@@ -50,7 +50,7 @@ public class Torque extends AbstractProc
       start = idx + 1;
       idx = recordEntry.indexOf(' ', start);
       String body = recordEntry.substring(idx + 1);
-      body.replaceAll("\n", "");
+      body = body.replaceAll("\n", "");
       Date d = sdf.parse(dStr);
       String[] kvpairs = body.split(", ");
 

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/reducer/ClientTrace.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/reducer/ClientTrace.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/reducer/ClientTrace.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/reducer/ClientTrace.java Wed Dec 21 20:16:17 2011
@@ -19,13 +19,6 @@
 package org.apache.hadoop.chukwa.extraction.demux.processor.reducer;
 
 import java.io.IOException;
-import java.net.InetAddress;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.HashMap;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 import java.util.Iterator;
 
 import org.apache.hadoop.chukwa.extraction.engine.Record;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java Wed Dec 21 20:16:17 2011
@@ -202,7 +202,7 @@ public class ChukwaRecordJT extends org.
       java.util.Set<String> _rio_set20 = _rio_peer.mapFields.keySet();
       java.util.Iterator<String> _rio_miter10 = _rio_set10.iterator();
       java.util.Iterator<String> _rio_miter20 = _rio_set20.iterator();
-      for (; _rio_miter10.hasNext() && _rio_miter20.hasNext();) {
+      while(_rio_miter10.hasNext() && _rio_miter20.hasNext()) {
         String _rio_k10 = _rio_miter10.next();
         String _rio_k20 = _rio_miter20.next();
         _rio_ret = _rio_k10.compareTo(_rio_k20);

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DsDirectory.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DsDirectory.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DsDirectory.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DsDirectory.java Wed Dec 21 20:16:17 2011
@@ -22,7 +22,6 @@ package org.apache.hadoop.chukwa.extract
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
 import org.apache.hadoop.chukwa.inputtools.mdl.DataConfig;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/database/DatabaseDS.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/database/DatabaseDS.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/database/DatabaseDS.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/database/DatabaseDS.java Wed Dec 21 20:16:17 2011
@@ -22,7 +22,6 @@ package org.apache.hadoop.chukwa.extract
 
 
 import java.sql.Connection;
-import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
@@ -37,10 +36,14 @@ import org.apache.hadoop.chukwa.extracti
 import org.apache.hadoop.chukwa.extraction.engine.Token;
 import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSource;
 import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSourceException;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 
 //import org.apache.hadoop.chukwa.hicc.ClusterConfig;
 
 public class DatabaseDS implements DataSource {
+  private static final Log log = LogFactory.getLog(DatabaseDS.class);
 
   public SearchResult search(SearchResult result, String cluster,
       String dataSource, long t0, long t1, String filter, Token token)
@@ -127,7 +130,7 @@ public class DatabaseDS implements DataS
         try {
           rs.close();
         } catch (SQLException sqlEx) {
-          // ignore
+          log.debug(ExceptionUtil.getStackTrace(sqlEx));
         }
         rs = null;
       }
@@ -135,7 +138,7 @@ public class DatabaseDS implements DataS
         try {
           stmt.close();
         } catch (SQLException sqlEx) {
-          // ignore
+          log.debug(ExceptionUtil.getStackTrace(sqlEx));
         }
         stmt = null;
       }

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/Chart.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/Chart.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/Chart.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/Chart.java Wed Dec 21 20:16:17 2011
@@ -19,19 +19,16 @@
 package org.apache.hadoop.chukwa.hicc;
 
 
-import java.io.PrintWriter;
-import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Map;
 import java.util.TreeMap;
 import java.util.HashMap;
-import java.util.Iterator;
 import java.util.List;
-import java.util.Set;
-import java.util.Date;
-import java.text.ParseException;
+import java.util.Map.Entry;
 import java.text.SimpleDateFormat;
 import javax.servlet.http.HttpServletRequest;
-import org.apache.hadoop.chukwa.hicc.ColorPicker;
+import javax.swing.text.html.HTMLDocument.Iterator;
+
 import org.apache.hadoop.chukwa.util.XssFilter;
 import org.json.JSONArray;
 
@@ -148,7 +145,7 @@ public class Chart {
   }
 
   public void setSeriesOrder(String[] metrics) {
-    this.seriesOrder = metrics;
+    this.seriesOrder = (String[]) metrics.clone();
   }
 
   public void setXAxisLabels(boolean toggle) {
@@ -190,7 +187,6 @@ public class Chart {
   }
 
   public String plot() {
-    SimpleDateFormat format = new SimpleDateFormat("m:s:S");
     StringBuilder output = new StringBuilder();
     if (dataset == null && restData == null) {
       output.append("No Data available.");
@@ -352,108 +348,110 @@ public class Chart {
     output.append("_series=[\n");
     ColorPicker cp = new ColorPicker();
     int i = 0;
-    for (TreeMap<String, TreeMap<String, Double>> dataMap : this.dataset) {
-      String[] keyNames = null;
-      if (this.seriesOrder != null) {
-        keyNames = this.seriesOrder;
-      } else {
-        keyNames = dataMap.keySet().toArray(
-            new String[dataMap.size()]);
-      }
-      int counter = 0;
-      if (i != 0) {
-        if (!this.userDefinedMax) {
-          this.max = 0;
-        }
-      }
-      for (String seriesName : keyNames) {
-        int counter2 = 0;
-        if ((counter != 0) || (i != 0)) {
-          output.append(",");
+    if(this.dataset!=null) {
+      for (TreeMap<String, TreeMap<String, Double>> dataMap : this.dataset) {
+        String[] keyNames;
+        if (this.seriesOrder != null) {
+          keyNames = this.seriesOrder;
+        } else {
+          keyNames = dataMap.keySet().toArray(
+              new String[dataMap.size()]);
         }
-        String param = "fill: false, lineWidth: 1";
-        String type = "lines";
-        if (this.chartType.get(i).intern() == "stack-area".intern()
-            || this.chartType.get(i).intern() == "area".intern()) {
-          param = "fill: true, lineWidth: 0";
+        int counter = 0;
+        if (i != 0 && !this.userDefinedMax) {
+            this.max = 0;
         }
-        if (this.chartType.get(i).intern() == "bar".intern()) {
-          type = "bars";
-          param = "stepByStep: true, lineWidth: 0";
-        }
-        if (this.chartType.get(i).intern() == "point".intern()) {
-          type = "points";
-          param = "fill: false";
-        }
-        output.append("  {");
-        output.append(type);
-        output.append(": { show: true, ");
-        output.append(param);
-        output.append(" }, color: \"");
-        output.append(cp.getNext());
-        output.append("\", label: \"");
-        output.append(seriesName);
-        output.append("\", ");
-        String showYAxis = "false";
-        String shortRow = "false";
-        if (counter == 0 || i > 0) {
-          showYAxis = "true";
-          shortRow = "false";
-        }
-        output.append(" row: { show: ");
-        output.append(showYAxis);
-        output.append(",shortRow:");
-        output.append(shortRow);
-        output.append(", showYAxis:");
-        output.append(showYAxis);
-        output.append("}, data:[");
-        TreeMap<String, Double> data = dataMap.get(seriesName);
-        if(data!=null) {
-          for (String dp : data.keySet()) {
-            int rangeLabel = 0;
-            if (counter2 != 0) {
-              output.append(",");
-            }
-            if (xLabel.equals("Time")) {
-              if (data.get(dp) == Double.NaN) {
-                output.append("[");
-                output.append(dp);
-                output.append(",NULL]");
-              } else {
-                output.append("[");
-                output.append(dp);
+        for (String seriesName : keyNames) {
+          int counter2 = 0;
+          if ((counter != 0) || (i != 0)) {
+            output.append(",");
+          }
+          String param = "fill: false, lineWidth: 1";
+          String type = "lines";
+          if (this.chartType.get(i).intern() == "stack-area".intern()
+              || this.chartType.get(i).intern() == "area".intern()) {
+            param = "fill: true, lineWidth: 0";
+          }
+          if (this.chartType.get(i).intern() == "bar".intern()) {
+            type = "bars";
+            param = "stepByStep: true, lineWidth: 0";
+          }
+          if (this.chartType.get(i).intern() == "point".intern()) {
+            type = "points";
+            param = "fill: false";
+          }
+          output.append("  {");
+          output.append(type);
+          output.append(": { show: true, ");
+          output.append(param);
+          output.append(" }, color: \"");
+          output.append(cp.getNext());
+          output.append("\", label: \"");
+          output.append(seriesName);
+          output.append("\", ");
+          String showYAxis = "false";
+          String shortRow = "false";
+          if (counter == 0 || i > 0) {
+            showYAxis = "true";
+            shortRow = "false";
+          }
+          output.append(" row: { show: ");
+          output.append(showYAxis);
+          output.append(",shortRow:");
+          output.append(shortRow);
+          output.append(", showYAxis:");
+          output.append(showYAxis);
+          output.append("}, data:[");
+          TreeMap<String, Double> data = dataMap.get(seriesName);
+          if(data!=null) {
+            java.util.Iterator<Entry<String, Double>> iter = data.entrySet().iterator();
+            while (iter.hasNext()) {
+              Map.Entry<String, Double> entry = (Map.Entry<String, Double>) iter.next();
+              int rangeLabel = 0;
+              if (counter2 != 0) {
                 output.append(",");
-                output.append(data.get(dp));
-                output.append("]");
               }
-            } else {
-              long value = xLabelRangeHash.get(dp);
-              if (data.get(dp) == Double.NaN) {
-                output.append("[");
-                output.append(value);
-                output.append(",NULL]");
+              if (xLabel.equals("Time")) {
+                if (Double.isNaN(entry.getValue())) {
+                  output.append("[");
+                  output.append(entry.getKey());
+                  output.append(",NULL]");
+                } else {
+                  output.append("[");
+                  output.append(entry.getKey());
+                  output.append(",");
+                  output.append(entry.getValue());
+                  output.append("]");
+                }
               } else {
-                output.append("[");
-                output.append(value);
-                output.append(",");
-                output.append(data.get(dp));
-                output.append("]");
+                long value = xLabelRangeHash.get(entry.getKey());
+                if (Double.isNaN(entry.getValue())) {
+                  output.append("[");
+                  output.append(value);
+                  output.append(",NULL]");
+                } else {
+                  output.append("[");
+                  output.append(value);
+                  output.append(",");
+                  output.append(entry.getValue());
+                  output.append("]");
+                }
+                rangeLabel++;
               }
-              rangeLabel++;
+              counter2++;
             }
-            counter2++;
           }
+          output.append("], min:0");
+          if (this.userDefinedMax) {
+            output.append(", max:");
+            output.append(this.max);
+          }
+          output.append("}");
+          counter++;
         }
-        output.append("], min:0");
-        if (this.userDefinedMax) {
-          output.append(", max:");
-          output.append(this.max);
-        }
-        output.append("}");
-        counter++;
+        i++;
+      }
       }
-      i++;
-    }
     output.append(" ];\n");
     if(this.restData!=null) {
       JSONArray arr = new JSONArray();

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/ClusterConfig.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/ClusterConfig.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/ClusterConfig.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/ClusterConfig.java Wed Dec 21 20:16:17 2011
@@ -25,8 +25,7 @@ import java.util.*;
 import org.apache.hadoop.chukwa.datastore.ChukwaHBaseStore;
 
 public class ClusterConfig {
-  public static Set<String> clusterMap = null;
-  private String path = System.getenv("CHUKWA_CONF_DIR") + File.separator;
+  private static Set<String> clusterMap = null;
 
   static public String getContents(File aFile) {
     // ...checks on aFile are elided

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/DatasetMapper.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/DatasetMapper.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/DatasetMapper.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/DatasetMapper.java Wed Dec 21 20:16:17 2011
@@ -19,7 +19,6 @@
 package org.apache.hadoop.chukwa.hicc;
 
 
-import java.text.SimpleDateFormat;
 import java.util.TreeMap;
 import java.util.HashMap;
 import java.util.ArrayList;
@@ -27,6 +26,7 @@ import java.util.List;
 import java.sql.*;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 
 public class DatasetMapper {
   private String jdbc;
@@ -42,7 +42,6 @@ public class DatasetMapper {
 
   public void execute(String query, boolean groupBySecondColumn,
       boolean calculateSlope, String formatTime, List<Object> parameters) {
-    SimpleDateFormat sdf = null;
     dataset.clear();
     try {
       // The newInstance() call is a work around for some
@@ -55,11 +54,8 @@ public class DatasetMapper {
     Connection conn = null;
     PreparedStatement stmt = null;
     ResultSet rs = null;
-    int counter = 0;
-    int size = 0;
     labels.clear();
     double max = 0.0;
-    int labelsCount = 0;
     long timeWindowSize=0;
     long previousTime=0;
     try {
@@ -88,11 +84,10 @@ public class DatasetMapper {
           String label = "";
           if (rmeta.getColumnType(1) == java.sql.Types.TIMESTAMP) {
             long time = rs.getTimestamp(1).getTime();
-            if(time==previousTime) {
-            } else if(timeWindowSize==0) {
+            if(timeWindowSize==0) {
               timeWindowSize=1;
               previousTime=time;
-            } else {
+            } else if(time!=previousTime) {
               timeWindowSize=(time-previousTime)/60000;
               previousTime=time;
             }
@@ -119,7 +114,7 @@ public class DatasetMapper {
                 double tmp = 0L;
                 if (data.size() > 1) {
                   tmp = (current - previousHash.get(item).doubleValue())/timeWindowSize;
-                  if(tmp==Double.NEGATIVE_INFINITY || tmp==Double.POSITIVE_INFINITY) {
+                  if(timeWindowSize<=0) {
                     tmp = Double.NaN;
                   }
                 } else {
@@ -158,7 +153,7 @@ public class DatasetMapper {
                 double tmp = current;
                 if (data.size() > 1) {
                   tmp = (tmp - previousArray[j])/timeWindowSize;
-                  if(tmp==Double.NEGATIVE_INFINITY || tmp==Double.POSITIVE_INFINITY) {
+                  if(timeWindowSize<=0) {
                     tmp = Double.NaN;
                   }
                 } else {
@@ -176,7 +171,6 @@ public class DatasetMapper {
             }
           }
         }
-        labelsCount = i;
       } else {
         log.error("query is not executed.");
       }
@@ -187,6 +181,7 @@ public class DatasetMapper {
       log.error("SQLState: " + ex.getSQLState());
       log.error("VendorError: " + ex.getErrorCode());
     } catch (Exception ex) {
+      log.debug(ExceptionUtil.getStackTrace(ex));
     } finally {
       // it is a good idea to release
       // resources in a finally{} block
@@ -196,7 +191,7 @@ public class DatasetMapper {
         try {
           rs.close();
         } catch (SQLException sqlEx) {
-          // ignore
+          log.debug(ExceptionUtil.getStackTrace(sqlEx));
         }
         rs = null;
       }
@@ -204,7 +199,7 @@ public class DatasetMapper {
         try {
           stmt.close();
         } catch (SQLException sqlEx) {
-          // ignore
+          log.debug(ExceptionUtil.getStackTrace(sqlEx));
         }
         stmt = null;
       }
@@ -212,7 +207,7 @@ public class DatasetMapper {
         try {
           conn.close();
         } catch (SQLException sqlEx) {
-          // ignore
+          log.debug(ExceptionUtil.getStackTrace(sqlEx));
         }
         conn = null;
       }

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/HiccWebServer.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/HiccWebServer.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/HiccWebServer.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/HiccWebServer.java Wed Dec 21 20:16:17 2011
@@ -24,12 +24,9 @@ import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.net.URISyntaxException;
 import java.net.URL;
-import java.net.URLConnection;
 import java.util.ArrayList;
 import java.util.Enumeration;
-import java.util.HashSet;
 import java.util.List;
-import java.util.Set;
 import java.util.jar.JarEntry;
 import java.util.jar.JarFile;
 
@@ -37,64 +34,48 @@ import org.apache.hadoop.chukwa.conf.Chu
 import org.apache.hadoop.chukwa.util.DaemonWatcher;
 import org.apache.hadoop.chukwa.util.ExceptionUtil;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.json.JSONObject;
 import org.mortbay.jetty.Server;
-import org.mortbay.jetty.handler.ContextHandler;
 import org.mortbay.xml.XmlConfiguration;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
 public class HiccWebServer {
   private static Log log = LogFactory.getLog(HiccWebServer.class);
-  private static URL serverConf = null;
+  private static URL serverConf = HiccWebServer.class.getResource("/WEB-INF/jetty.xml");
   private Server server = null;
   private String chukwaHdfs;
   private String hiccData;
-  public static ChukwaConfiguration chukwaConf = new ChukwaConfiguration();
-  public static Configuration config = new Configuration();
-  public static FileSystem fs = null;
   private static HiccWebServer instance = null;
+  private static final Configuration config = new Configuration();
+  protected static final ChukwaConfiguration chukwaConf = new ChukwaConfiguration();
 
   protected HiccWebServer() {
   }
-
-//  public HiccWebServer(Configuration conf) {
-//    config = conf;
-//  }
-//  
+ 
   public static HiccWebServer getInstance() {
     if(instance==null) {
-      config = new Configuration();
       instance = new HiccWebServer();
     }
+    if(serverConf==null) {
+      log.error("Unable to locate jetty-web.xml.");
+      DaemonWatcher.bailout(-1);
+    }
     return instance;
- }
+  }
 
   public void start() {
     try {
-      if(fs==null) {
-        fs = FileSystem.get(config);
-        chukwaHdfs = config.get("fs.default.name")+File.separator+chukwaConf.get("chukwa.data.dir");
-        hiccData = chukwaHdfs+File.separator+"hicc";
-        DaemonWatcher.createInstance("hicc");
-        serverConf = HiccWebServer.class.getResource("/WEB-INF/jetty.xml");
-        if(serverConf==null) {
-          log.error("Unable to locate jetty-web.xml.");
-          DaemonWatcher.bailout(-1);
-        }
-        instance = this;
-        setupDefaultData();
-        run();
-      }
+      chukwaHdfs = config.get("fs.default.name")+File.separator+chukwaConf.get("chukwa.data.dir");
+      hiccData = chukwaHdfs+File.separator+"hicc";
+      DaemonWatcher.createInstance("hicc");
+      setupDefaultData();
+      run();
     } catch(Exception e) {
       log.error("HDFS unavailable, check configuration in chukwa-env.sh.");
-      System.exit(-1);
+      DaemonWatcher.bailout(-1);
     }
   }
 
@@ -102,10 +83,6 @@ public class HiccWebServer {
     return config;
   }
   
-  public static FileSystem getFileSystem() {
-    return fs;
-  }
-  
   public List<String> getResourceListing(String path) throws URISyntaxException, IOException {
     ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
     URL dirURL = contextClassLoader.getResource(path);
@@ -138,14 +115,15 @@ public class HiccWebServer {
   }
   
   public void populateDir(List<String> files, Path path) {
-    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
+    try {
+      FileSystem fs = FileSystem.get(config);
+      ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
       for(String source : files) {
         String name = source.substring(source.indexOf(File.separator));
         Path dest = new Path(path.toString()+File.separator+name);
         InputStream is = contextClassLoader.getResourceAsStream(source);
         StringBuilder sb = new StringBuilder();
         String line = null;
-
         try {
           BufferedReader reader = new BufferedReader(new InputStreamReader(is));
           while ((line = reader.readLine()) != null) {
@@ -154,16 +132,20 @@ public class HiccWebServer {
           FSDataOutputStream out = fs.create(dest);
           out.write(sb.toString().getBytes());
           out.close();
-          } catch(IOException e) {
+          reader.close();
+        } catch(IOException e) {
             log.error("Error writing file: "+dest.toString());
-          }
+        }
       }
+    } catch(IOException e) {
+      log.error("HDFS unavailable, check configuration in chukwa-env.sh.");
+    }
   }
   
   public void setupDefaultData() {
     Path hiccPath = new Path(hiccData);
     try {
-      fs = FileSystem.get(config);
+      FileSystem fs = FileSystem.get(config);
       if(!fs.exists(hiccPath)) {
         log.info("Initializing HICC Datastore.");
         // Create chukwa directory
@@ -209,8 +191,10 @@ public class HiccWebServer {
         populateDir(views, viewsPath);
         log.info("HICC Datastore initialization completed.");
       }
-    } catch (Exception ex) {
+    } catch (IOException ex) {
       log.error(ExceptionUtil.getStackTrace(ex));
+    } catch (URISyntaxException ex) {
+      log.error(ExceptionUtil.getStackTrace(ex));      
     }
   }
 

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/ImageSlicer.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/ImageSlicer.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/ImageSlicer.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/ImageSlicer.java Wed Dec 21 20:16:17 2011
@@ -17,21 +17,13 @@
  */
 package org.apache.hadoop.chukwa.hicc;
 
-import java.awt.Component;
 import java.awt.Graphics;
-import java.awt.Graphics2D;
-import java.awt.Image;
-import java.awt.RenderingHints;
 import java.awt.geom.AffineTransform;
 import java.awt.image.AffineTransformOp;
 import java.awt.image.BufferedImage;
-import java.awt.image.CropImageFilter;
-import java.awt.image.FilteredImageSource;
 import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
-import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -40,7 +32,6 @@ import javax.imageio.ImageIO;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.chukwa.util.ExceptionUtil;
-import org.apache.hadoop.chukwa.util.XssFilter;
 
 public class ImageSlicer {
   private BufferedImage src = null;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/JSONLoader.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/JSONLoader.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/JSONLoader.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/JSONLoader.java Wed Dec 21 20:16:17 2011
@@ -22,9 +22,12 @@ package org.apache.hadoop.chukwa.hicc;
 import java.net.*;
 import java.io.*;
 import org.json.*;
+import org.apache.log4j.Logger;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 
 public class JSONLoader {
   public JSONArray jsonData;
+  private static Logger log = Logger.getLogger(JSONLoader.class);
 
   static public String getContents(String source) {
     // ...checks on aFile are elided
@@ -57,6 +60,7 @@ public class JSONLoader {
       JSONObject rows = new JSONObject(buffer);
       jsonData = new JSONArray(rows.get("rows").toString());
     } catch (JSONException e) {
+      log.debug(ExceptionUtil.getStackTrace(e)); 
     }
   }
 
@@ -65,6 +69,7 @@ public class JSONLoader {
     try {
       ts = ((JSONObject) jsonData.get(i)).get("ts").toString();
     } catch (JSONException e) {
+      log.debug(ExceptionUtil.getStackTrace(e)); 
     }
     return ts;
   }
@@ -75,6 +80,7 @@ public class JSONLoader {
       tags = ((JSONObject) jsonData.get(i)).get("tags")
           .toString();
     } catch (JSONException e) {
+      log.debug(ExceptionUtil.getStackTrace(e)); 
     }
     return tags;
   }
@@ -85,6 +91,7 @@ public class JSONLoader {
       value = ((JSONObject) jsonData.get(i)).get("value")
           .toString();
     } catch (JSONException e) {
+      log.debug(ExceptionUtil.getStackTrace(e)); 
     }
     return value;
   }

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/OfflineTimeHandler.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/OfflineTimeHandler.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/OfflineTimeHandler.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/OfflineTimeHandler.java Wed Dec 21 20:16:17 2011
@@ -52,12 +52,10 @@ public class OfflineTimeHandler {
 
   public void init(HashMap<String, String> map) {
     Calendar now = Calendar.getInstance();
-    String timeType = "last";
-    if (map.get("time_type") == null
+    if (map == null || (map != null 
         && map.get("time_type") == null
-        && map.get("period") == null
-        && map.get("period") == null) {
-      timeType = "last";
+        && map.get("time_type") == null
+        && map.get("period") == null)) {
       end = now.getTimeInMillis();
       start = end - 60 * 60 * 1000;
     } else if (map.get("period") != null

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/TimeHandler.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/TimeHandler.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/TimeHandler.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/TimeHandler.java Wed Dec 21 20:16:17 2011
@@ -23,7 +23,6 @@ import javax.servlet.http.*;
 
 import org.apache.hadoop.chukwa.util.XssFilter;
 
-import java.util.Date;
 import java.util.Calendar;
 import java.util.TimeZone;
 import java.util.StringTokenizer;
@@ -32,14 +31,10 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import com.mdimension.jchronic.Chronic;
 import com.mdimension.jchronic.Options;
-import com.mdimension.jchronic.tags.Pointer;
 import com.mdimension.jchronic.utils.Span;
-import com.mdimension.jchronic.utils.Time;
-
 
 public class TimeHandler {
   private HttpSession session = null;
-  private HttpServletRequest request = null;
   private TimeZone tz = null;
   private long start = 0;
   private long end = 0;
@@ -76,9 +71,7 @@ public class TimeHandler {
 	Calendar now = Calendar.getInstance();
 	long l=now.getTimeInMillis();
 	d=d.trim();
-	if (d.compareToIgnoreCase("now")==0) {
-	    // do nothing because it is default to now.
-	} else {
+	if (d.compareToIgnoreCase("now")!=0) {
 	    Options options= new Options(false);
 	    options.setCompatibilityMode(true);
 	    options.setNow(now);
@@ -153,13 +146,10 @@ public class TimeHandler {
     xf = new XssFilter(request);
     Calendar now = Calendar.getInstance();
     this.session = request.getSession();
-    this.request = request;
-    String timeType = "last";
     if (request.getParameter("time_type") == null
         && session.getAttribute("time_type") == null
         && session.getAttribute("period") == null
         && request.getParameter("period") == null) {
-      timeType = "last";
       end = now.getTimeInMillis();
       start = end - 60 * 60 * 1000;
       session.setAttribute("period", "last1hr");
@@ -182,33 +172,7 @@ public class TimeHandler {
       String period = (String) session.getAttribute("period");
       parsePeriodValue(period);
     }
-    // if((request.getParameter("period")==null ||
-    // request.getParameter("period").equals("")) &&
-    // session.getAttribute("time_type")!=null) {
-    // timeType = (String)session.getAttribute("time_type");
-    // }
-    // if((request.getParameter("period")!=null &&
-    // !request.getParameter("period").equals("")) || (timeType!=null &&
-    // timeType.equals("last"))) {
-    // String period = request.getParameter("period");
-    // if(period == null) {
-    // period = (String) session.getAttribute("period");
-    // if(period == null) {
-    // period = "last1hr";
-    // session.setAttribute("period",period);
-    // }
-    // }
-    // // no time specified in request nor session, set default time in session.
-    // if(request.getParameter("time_type")!=null &&
-    // request.getParameter("time_type").equals("range")) {
-    // session.setAttribute("start", ""+start);
-    // session.setAttribute("end", ""+end);
-    // }
-    // } else {
-    // // no time specified in request, use session time.
-    // start = Long.parseLong((String) session.getAttribute("start"));
-    // end = Long.parseLong((String) session.getAttribute("end"));
-    // }
+
     SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm");
     SimpleDateFormat formatDate = new SimpleDateFormat("yyyy-MM-dd");
     SimpleDateFormat formatHour = new SimpleDateFormat("HH");

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/Views.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/Views.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/Views.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/Views.java Wed Dec 21 20:16:17 2011
@@ -22,11 +22,15 @@ package org.apache.hadoop.chukwa.hicc;
 import java.io.*;
 import java.util.*;
 import org.json.*;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 
 public class Views {
   public JSONArray viewsData;
   private String path = System.getenv("CHUKWA_DATA_DIR")
       + "/views/workspace_view_list.cache";
+  private static final Log log = LogFactory.getLog(Views.class);
 
   static public String getContents(File aFile) {
     // ...checks on aFile are elided
@@ -63,6 +67,7 @@ public class Views {
     try {
       viewsData = new JSONArray(buffer);
     } catch (JSONException e) {
+      log.debug(ExceptionUtil.getStackTrace(e));
     }
   }
 
@@ -72,6 +77,7 @@ public class Views {
       owner = ((JSONObject) viewsData.get(i)).get("owner")
           .toString();
     } catch (JSONException e) {
+      log.debug(ExceptionUtil.getStackTrace(e));
     }
     return owner;
   }
@@ -82,6 +88,7 @@ public class Views {
       permission = ((JSONObject) ((JSONObject) viewsData.get(i))
           .get("permission")).keys();
     } catch (JSONException e) {
+      log.debug(ExceptionUtil.getStackTrace(e));
     }
     return permission;
   }
@@ -94,6 +101,7 @@ public class Views {
       JSONObject user = (JSONObject) permission.get(who);
       read = user.get("read").toString();
     } catch (JSONException e) {
+      log.debug(ExceptionUtil.getStackTrace(e));
     }
     return read;
   }
@@ -103,6 +111,7 @@ public class Views {
     try {
       write = ((JSONObject) ((JSONObject) ((JSONObject) viewsData.get(i)).get("permission")).get(who)).get("write").toString();
     } catch (JSONException e) {
+      log.debug(ExceptionUtil.getStackTrace(e));
     }
     return write;
   }
@@ -113,6 +122,7 @@ public class Views {
       description = ((JSONObject) viewsData.get(i)).get(
           "description").toString();
     } catch (JSONException e) {
+      log.debug(ExceptionUtil.getStackTrace(e));
     }
     return description;
   }
@@ -122,6 +132,7 @@ public class Views {
     try {
       key = ((JSONObject) viewsData.get(i)).get("key").toString();
     } catch (JSONException e) {
+      log.debug(ExceptionUtil.getStackTrace(e));
     }
     return key;
   }

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/ViewsTag.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/ViewsTag.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/ViewsTag.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/ViewsTag.java Wed Dec 21 20:16:17 2011
@@ -23,12 +23,8 @@ import javax.servlet.jsp.JspException;
 import javax.servlet.jsp.tagext.SimpleTagSupport;
 import java.io.IOException;
 import java.util.*;
-import org.apache.hadoop.chukwa.hicc.Views;
 
 public class ViewsTag extends SimpleTagSupport {
-  private String key = null;
-  private String owner = null;
-  private String description = null;
   Views views = new Views();
 
   public void doTag() throws JspException, IOException {

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/Workspace.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/Workspace.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/Workspace.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/Workspace.java Wed Dec 21 20:16:17 2011
@@ -24,16 +24,18 @@ import java.util.*;
 import javax.servlet.*;
 import javax.servlet.http.*;
 import java.sql.*;
-
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.chukwa.util.XssFilter;
 import org.json.*;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 
 public class Workspace extends HttpServlet {
   public static final long serialVersionUID = 101L;
+  private static final Log log = LogFactory.getLog(Workspace.class);
   private String path = System.getenv("CHUKWA_DATA_DIR");
-  private JSONObject hash = new JSONObject();
-  private String user = "admin";
-  private XssFilter xf = null;
+  transient private JSONObject hash = new JSONObject();
+  transient private XssFilter xf;
 
   @Override  
   protected void doTrace(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
@@ -44,7 +46,6 @@ public class Workspace extends HttpServl
       throws IOException, ServletException {
     xf = new XssFilter(request);
     response.setContentType("text/plain");
-    PrintWriter out = response.getWriter();
     String method = xf.getParameter("method");
     if (method.equals("get_views_list")) {
       getViewsList(request, response);
@@ -137,7 +138,9 @@ public class Workspace extends HttpServl
     }
     setContents(path + "/views/" + name + ".view", config);
     File deleteCache = new File(path + "/views/workspace_view_list.cache");
-    deleteCache.delete();
+    if(!deleteCache.delete()) {
+      log.warn("Can not delete "+path + "/views/workspace_view_list.cache");
+    }
     genViewCache(path + "/views");
     aFile = new File(path + "/views/workspace_view_list.cache");
     String viewsCache = getContents(aFile);
@@ -146,19 +149,21 @@ public class Workspace extends HttpServl
 
   public void deleteView(HttpServletRequest request,
       HttpServletResponse response) throws IOException, ServletException {
-    PrintWriter out = response.getWriter();
     String name = xf.getParameter("name");
     File aFile = new File(path + "/views/" + name + ".view");
-    aFile.delete();
+    if(!aFile.delete()) {
+      log.warn("Can not delete " + path + "/views/" + name + ".view");
+    }
     File deleteCache = new File(path + "/views/workspace_view_list.cache");
-    deleteCache.delete();
+    if(!deleteCache.delete()) {
+      log.warn("Can not delete "+path + "/views/workspace_view_list.cache");
+    }
     genViewCache(path + "/views");
   }
 
   public void getViewsList(HttpServletRequest request,
       HttpServletResponse response) throws IOException, ServletException {
     PrintWriter out = response.getWriter();
-    String format = xf.getParameter("format");
     genViewCache(path + "/views");
     File aFile = new File(path + "/views/workspace_view_list.cache");
     String viewsCache = getContents(aFile);
@@ -191,7 +196,9 @@ public class Workspace extends HttpServl
         throw new Exception("Rename view file failed");
       }
       File deleteCache = new File(path + "/views/workspace_view_list.cache");
-      deleteCache.delete();
+      if(!deleteCache.delete()) {
+        log.warn("Can not delete "+path + "/views/workspace_view_list.cache");
+      }
       genViewCache(path + "/views");
       out.println("Workspace is stored successfully.");
     } catch (Exception e) {
@@ -204,7 +211,6 @@ public class Workspace extends HttpServl
     PrintWriter out = response.getWriter();
     String id = xf.getParameter("name");
     String config = request.getParameter("config");
-    File aFile = new File(path + "/views/" + id + ".view");
     setContents(path + "/views/" + id + ".view", config);
     out.println("Workspace is stored successfully.");
   }
@@ -212,7 +218,6 @@ public class Workspace extends HttpServl
   public void getWidgetList(HttpServletRequest request,
       HttpServletResponse response) throws IOException, ServletException {
     PrintWriter out = response.getWriter();
-    String format = xf.getParameter("format");
     genWidgetCache(path + "/descriptors");
     File aFile = new File(path + "/descriptors/workspace_plugin.cache");
     String viewsCache = getContents(aFile);
@@ -237,6 +242,7 @@ public class Workspace extends HttpServl
           jt.put("key", fn.substring(0, (fn.length() - 5)));
           cacheGroup[i] = jt;
         } catch (Exception e) {
+          log.debug(ExceptionUtil.getStackTrace(e));
         }
       }
       String viewList = convertObjectsToViewList(cacheGroup);
@@ -288,6 +294,7 @@ public class Workspace extends HttpServl
           JSONObject jt = new JSONObject(buffer);
           cacheGroup[i] = jt;
         } catch (Exception e) {
+          log.debug(ExceptionUtil.getStackTrace(e));
         }
       }
       String widgetList = convertObjectsToWidgetList(cacheGroup);
@@ -306,7 +313,6 @@ public class Workspace extends HttpServl
     } catch (Exception e) {
       System.err.println("JSON Exception: " + e.getMessage());
     }
-    JSONObject tmpHash = new JSONObject();
     for (int i = 0; i < objArray.length; i++) {
       try {
         String[] categoriesArray = objArray[i].get("categories").toString()
@@ -344,6 +350,7 @@ public class Workspace extends HttpServl
             subHash.put("node:" + id, tmpHash);
             subHash = tmpHash;
           } catch (JSONException ex) {
+            log.debug(ExceptionUtil.getStackTrace(ex));
           }
         }
       }
@@ -356,14 +363,14 @@ public class Workspace extends HttpServl
       File view = new File(path + "/views/" + id + ".view");
       File newFile = new File(path + File.separator + "views" + File.separator
           + desc + ".view");
-      view.renameTo(newFile);
+      if(!view.renameTo(newFile)) {
+        log.warn("Can not rename " + path + "/views/" + id + ".view to " + 
+            path + File.separator + "views" + File.separator + desc + ".view");
+      }
     } catch (Exception e) {
       return false;
     }
     return true;
   }
 
-  private JSONObject filterViewsByPermission(String userid, JSONObject viewArray) {
-    return viewArray;
-  }
 }

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/bean/Series.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/bean/Series.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/bean/Series.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/bean/Series.java Wed Dec 21 20:16:17 2011
@@ -17,17 +17,11 @@
  */
 package org.apache.hadoop.chukwa.hicc.bean;
 
-import java.util.ArrayList;
-import java.util.List;
-
 import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.bind.annotation.XmlType;
-import javax.xml.bind.annotation.XmlValue;
 
-import org.apache.hadoop.chukwa.datastore.ChukwaHBaseStore;
 import org.apache.hadoop.chukwa.util.ExceptionUtil;
 import org.apache.log4j.Logger;
 import org.json.simple.JSONArray;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java Wed Dec 21 20:16:17 2011
@@ -19,7 +19,6 @@ package org.apache.hadoop.chukwa.hicc.re
 
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
-import java.util.Date;
 import java.util.Set;
 
 import javax.servlet.http.HttpServletRequest;
@@ -34,16 +33,13 @@ import javax.ws.rs.WebApplicationExcepti
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.Response;
 
-import org.apache.hadoop.chukwa.datacollection.adaptor.sigar.SigarRunner;
 import org.apache.hadoop.chukwa.datastore.ChukwaHBaseStore;
 import org.apache.hadoop.chukwa.hicc.TimeHandler;
 import org.apache.hadoop.chukwa.hicc.bean.Series;
-import org.apache.log4j.Logger;
 import org.json.simple.JSONArray;
 
 @Path("/metrics")
 public class MetricsController {
-  private static Logger log = Logger.getLogger(MetricsController.class);
 
   @GET
   @Path("series/{table}/{family}/{column}/rowkey/{rkey}")
@@ -86,7 +82,6 @@ public class MetricsController {
   public String getSeriesBySessionAttribute(@Context HttpServletRequest request, @PathParam("table") String table, @PathParam("column") String column, @PathParam("sessionKey") String skey, @QueryParam("start") String start, @QueryParam("end") String end) {
     SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
     String buffer = "";
-    Series series;
     long startTime = 0;
     long endTime = 0;
     TimeHandler time = new TimeHandler(request);
@@ -152,8 +147,6 @@ public class MetricsController {
   @Produces("application/json")
   public String getColumnNames(@Context HttpServletRequest request, @PathParam("table") String tableName, @PathParam("family") String family, @QueryParam("start") String start, @QueryParam("end") String end, @DefaultValue("false") @QueryParam("fullScan") boolean fullScan) {
     SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
-    String buffer = "";
-    Series series;
     long startTime = 0;
     long endTime = 0;
     TimeHandler time = new TimeHandler(request);
@@ -185,8 +178,6 @@ public class MetricsController {
   @Produces("application/json")
   public String getRowNames(@Context HttpServletRequest request, @PathParam("table") String tableName, @PathParam("family") String family, @PathParam("column") String column, @QueryParam("start") String start, @QueryParam("end") String end, @QueryParam("fullScan") @DefaultValue("false") boolean fullScan) {
     SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
-    String buffer = "";
-    Series series;
     long startTime = 0;
     long endTime = 0;
     TimeHandler time = new TimeHandler(request);

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/ChukwaInputFormat.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/ChukwaInputFormat.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/ChukwaInputFormat.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/ChukwaInputFormat.java Wed Dec 21 20:16:17 2011
@@ -24,8 +24,6 @@ import java.util.regex.*;
 import org.apache.hadoop.chukwa.*;
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.*;
 import org.apache.log4j.Logger;
 

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/jplugin/ChukwaMetrics.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/jplugin/ChukwaMetrics.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/jplugin/ChukwaMetrics.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/jplugin/ChukwaMetrics.java Wed Dec 21 20:16:17 2011
@@ -20,8 +20,6 @@ package org.apache.hadoop.chukwa.inputto
 
 import java.util.HashMap;
 
-import org.w3c.dom.Element;
-
 public interface ChukwaMetrics {
   String getKey();
   HashMap<String, String> getAttributes();

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/jplugin/ChukwaMetricsList.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/jplugin/ChukwaMetricsList.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/jplugin/ChukwaMetricsList.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/jplugin/ChukwaMetricsList.java Wed Dec 21 20:16:17 2011
@@ -21,12 +21,10 @@ package org.apache.hadoop.chukwa.inputto
 import java.io.StringWriter;
 import java.util.ArrayList;
 import java.util.Date;
-import java.util.HashMap;
 import java.util.Map.Entry;
 
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.transform.Transformer;
 import javax.xml.transform.TransformerFactory;
 import javax.xml.transform.dom.DOMSource;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/jplugin/JPluginAgent.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/jplugin/JPluginAgent.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/jplugin/JPluginAgent.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/jplugin/JPluginAgent.java Wed Dec 21 20:16:17 2011
@@ -25,7 +25,7 @@ import java.util.TimerTask;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.chukwa.util.DaemonWatcher;
-import org.apache.hadoop.chukwa.util.PidFile;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 
 public class JPluginAgent {
   private static Log log = LogFactory.getLog(JPluginAgent.class);
@@ -124,6 +124,7 @@ public class JPluginAgent {
             period * 1000);
       }
     } catch (Exception ex) {
+      log.debug(ExceptionUtil.getStackTrace(ex));
     }
   }
 }

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/ChukwaDailyRollingFileAppender.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/ChukwaDailyRollingFileAppender.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/ChukwaDailyRollingFileAppender.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/ChukwaDailyRollingFileAppender.java Wed Dec 21 20:16:17 2011
@@ -39,7 +39,6 @@ import java.util.Locale;
 import java.util.TimeZone;
 import java.util.regex.Pattern;
 
-import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
 import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
 import org.apache.hadoop.chukwa.datacollection.controller.ClientFinalizer;
 import org.apache.hadoop.chukwa.util.AdaptorNamingUtils;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/Log4JMetricsContext.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/Log4JMetricsContext.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/Log4JMetricsContext.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/Log4JMetricsContext.java Wed Dec 21 20:16:17 2011
@@ -25,9 +25,6 @@ package org.apache.hadoop.chukwa.inputto
  * Copy chukwa-hadoop-*-client.jar and json.jar to HADOOP_HOME/lib
  * 
  */
-import java.io.File;
-import java.io.IOException;
-
 import org.apache.hadoop.metrics.ContextFactory;
 import org.apache.hadoop.metrics.MetricsException;
 import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
@@ -38,7 +35,8 @@ import org.json.simple.JSONObject;
 import java.util.TreeMap;
 import java.util.Map;
 import java.util.Collection;
-import java.util.List;
+import java.io.IOException;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 
 public class Log4JMetricsContext extends AbstractMetricsContext {
   Logger log = Logger.getLogger(Log4JMetricsContext.class);
@@ -67,6 +65,7 @@ public class Log4JMetricsContext extends
       try {
         period = Integer.parseInt(periodStr);
       } catch (NumberFormatException nfe) {
+        log.debug(ExceptionUtil.getStackTrace(nfe));
       }
       if (period <= 0) {
         throw new MetricsException("Invalid period: " + periodStr);

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/Log4jMetricsSink.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/Log4jMetricsSink.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/Log4jMetricsSink.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/log4j/Log4jMetricsSink.java Wed Dec 21 20:16:17 2011
@@ -21,13 +21,8 @@ package org.apache.hadoop.chukwa.inputto
 import org.json.simple.JSONObject;
 import org.apache.log4j.Logger;
 import org.apache.log4j.PatternLayout;
-import java.io.BufferedOutputStream;
-import java.io.File;
-import java.io.FileWriter;
-import java.io.PrintWriter;
 import org.apache.commons.configuration.SubsetConfiguration;
 import org.apache.hadoop.metrics2.Metric;
-import org.apache.hadoop.metrics2.MetricsException;
 import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsSink;
 import org.apache.hadoop.metrics2.MetricsTag;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/ErStreamHandler.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/ErStreamHandler.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/ErStreamHandler.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/ErStreamHandler.java Wed Dec 21 20:16:17 2011
@@ -18,11 +18,9 @@
 package org.apache.hadoop.chukwa.inputtools.mdl;
 
 
-import java.lang.Thread;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.BufferedReader;
-import java.lang.StringBuffer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/LoaderServer.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/LoaderServer.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/LoaderServer.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/LoaderServer.java Wed Dec 21 20:16:17 2011
@@ -19,8 +19,6 @@
 package org.apache.hadoop.chukwa.inputtools.mdl;
 
 
-import java.io.IOException;
-import java.io.File;
 import java.io.*;
 import java.lang.management.ManagementFactory;
 import java.nio.channels.*;

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/TorqueDataLoader.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/TorqueDataLoader.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/TorqueDataLoader.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/TorqueDataLoader.java Wed Dec 21 20:16:17 2011
@@ -18,17 +18,11 @@
 package org.apache.hadoop.chukwa.inputtools.mdl;
 
 
-import java.lang.Thread;
-import java.lang.management.ManagementFactory;
-import java.io.FileOutputStream;
 import java.sql.SQLException;
-import java.io.IOException;
-import java.io.File;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.chukwa.inputtools.mdl.TorqueInfoProcessor;
-import org.apache.hadoop.chukwa.inputtools.mdl.DataConfig;
 import org.apache.hadoop.chukwa.util.PidFile;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 
 public class TorqueDataLoader {
   private static Log log = LogFactory.getLog("TorqueDataLoader");
@@ -86,7 +80,7 @@ public class TorqueDataLoader {
         try {
           Thread.sleep(16 * 1000);
         } catch (InterruptedException e) {
-          ;
+          log.debug(ExceptionUtil.getStackTrace(e));
         }
         tp.shutdown();
         log.error("process died...." + ex.getMessage());

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/TorqueInfoProcessor.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/TorqueInfoProcessor.java?rev=1221864&r1=1221863&r2=1221864&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/TorqueInfoProcessor.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/TorqueInfoProcessor.java Wed Dec 21 20:16:17 2011
@@ -19,34 +19,22 @@ package org.apache.hadoop.chukwa.inputto
 
 
 import java.sql.SQLException;
-import java.sql.ResultSet;
-import java.lang.Exception;
 import java.util.Calendar;
 import java.util.Set;
 import java.util.TreeSet;
 import java.util.TreeMap;
 import java.util.Iterator;
-import java.lang.StringBuffer;
 import java.sql.Timestamp;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
-import java.lang.Thread;
 import java.util.Timer;
-import java.lang.ProcessBuilder;
-import java.lang.Process;
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStreamReader;
-import java.lang.InterruptedException;
-import java.lang.System;
 import java.util.Date;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.chukwa.inputtools.mdl.DataConfig;
-import org.apache.hadoop.chukwa.inputtools.mdl.TorqueTimerTask;
-import org.apache.hadoop.chukwa.inputtools.mdl.ErStreamHandler;
-import org.apache.hadoop.chukwa.util.DatabaseWriter;
 
 public class TorqueInfoProcessor {
 
@@ -291,7 +279,6 @@ public class TorqueInfoProcessor {
       SQLException {
     TreeMap<String, String> aJobData = currentHodJobs.get(hodId);
     String userId = aJobData.get("userId");
-    String process = aJobData.get("process");
 
     StringBuffer sb = new StringBuffer();
     sb.append(torqueBinDir).append("/tracejob -n 10 -l -m -s ").append(hodId);
@@ -416,7 +403,6 @@ public class TorqueInfoProcessor {
 
     long currentTime = System.currentTimeMillis();
     currentTime = currentTime - currentTime % (60 * 1000);
-    Timestamp timestamp = new Timestamp(currentTime);
 
     Set<String> hodIds = currentHodJobs.keySet();