You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@apex.apache.org by th...@apache.org on 2016/08/26 21:10:19 UTC

[1/6] apex-malhar git commit: Fixed checkstyle errors for demos.

Repository: apex-malhar
Updated Branches:
  refs/heads/master 0a1adff8a -> 7d9386d2a


http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/ApplicationWithQuerySupport.java
----------------------------------------------------------------------
diff --git a/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/ApplicationWithQuerySupport.java b/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/ApplicationWithQuerySupport.java
index fd67bf6..7e5bb93 100644
--- a/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/ApplicationWithQuerySupport.java
+++ b/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/ApplicationWithQuerySupport.java
@@ -20,23 +20,22 @@ package com.datatorrent.demos.wordcount;
 
 import java.net.URI;
 
-import org.apache.commons.lang.StringUtils;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.datatorrent.api.annotation.ApplicationAnnotation;
-import com.datatorrent.api.StreamingApplication;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.Operator;
+import com.datatorrent.api.StreamingApplication;
+import com.datatorrent.api.annotation.ApplicationAnnotation;
 
 import com.datatorrent.lib.appdata.schemas.SchemaUtils;
 import com.datatorrent.lib.appdata.snapshot.AppDataSnapshotServerMap;
+import com.datatorrent.lib.io.ConsoleOutputOperator;
 import com.datatorrent.lib.io.PubSubWebSocketAppDataQuery;
 import com.datatorrent.lib.io.PubSubWebSocketAppDataResult;
-import com.datatorrent.lib.io.ConsoleOutputOperator;
-
-import org.apache.hadoop.conf.Configuration;
 
 /**
  * Simple demo that computes word frequencies from any file dropped into a
@@ -49,7 +48,7 @@ import org.apache.hadoop.conf.Configuration;
  * <p>
  * @since 3.2.0
  */
-@ApplicationAnnotation(name="TopNWordsWithQueries")
+@ApplicationAnnotation(name = "TopNWordsWithQueries")
 public class ApplicationWithQuerySupport implements StreamingApplication
 {
   private static final Logger LOG = LoggerFactory.getLogger(ApplicationWithQuerySupport.class);
@@ -87,13 +86,13 @@ public class ApplicationWithQuerySupport implements StreamingApplication
 
     String gatewayAddress = dag.getValue(DAG.GATEWAY_CONNECT_ADDRESS);
 
-    if ( ! StringUtils.isEmpty(gatewayAddress)) {        // add query support
+    if (!StringUtils.isEmpty(gatewayAddress)) {        // add query support
       URI uri = URI.create("ws://" + gatewayAddress + "/pubsub");
 
       AppDataSnapshotServerMap snapshotServerFile
-        = dag.addOperator("snapshotServerFile", new AppDataSnapshotServerMap());
+          = dag.addOperator("snapshotServerFile", new AppDataSnapshotServerMap());
       AppDataSnapshotServerMap snapshotServerGlobal
-        = dag.addOperator("snapshotServerGlobal", new AppDataSnapshotServerMap());
+          = dag.addOperator("snapshotServerGlobal", new AppDataSnapshotServerMap());
 
       String snapshotServerJSON = SchemaUtils.jarResourceFileToString(SNAPSHOT_SCHEMA);
       snapshotServerFile.setSnapshotSchemaJSON(snapshotServerJSON);
@@ -108,19 +107,17 @@ public class ApplicationWithQuerySupport implements StreamingApplication
       snapshotServerGlobal.setEmbeddableQueryInfoProvider(wsQueryGlobal);
 
       PubSubWebSocketAppDataResult wsResultFile
-        = dag.addOperator("wsResultFile", new PubSubWebSocketAppDataResult());
+          = dag.addOperator("wsResultFile", new PubSubWebSocketAppDataResult());
       PubSubWebSocketAppDataResult wsResultGlobal
-        = dag.addOperator("wsResultGlobal", new PubSubWebSocketAppDataResult());
+          = dag.addOperator("wsResultGlobal", new PubSubWebSocketAppDataResult());
       wsResultFile.setUri(uri);
       wsResultGlobal.setUri(uri);
 
       Operator.InputPort<String> queryResultFilePort = wsResultFile.input;
       Operator.InputPort<String> queryResultGlobalPort = wsResultGlobal.input;
 
-      dag.addStream("WordCountsFile", fileWordCount.outputPerFile,
-                    snapshotServerFile.input, console.input);
-      dag.addStream("WordCountsGlobal", fileWordCount.outputGlobal,
-                    snapshotServerGlobal.input);
+      dag.addStream("WordCountsFile", fileWordCount.outputPerFile, snapshotServerFile.input, console.input);
+      dag.addStream("WordCountsGlobal", fileWordCount.outputGlobal, snapshotServerGlobal.input);
 
       dag.addStream("ResultFile", snapshotServerFile.queryResult, queryResultFilePort);
       dag.addStream("ResultGlobal", snapshotServerGlobal.queryResult, queryResultGlobalPort);
@@ -129,7 +126,7 @@ public class ApplicationWithQuerySupport implements StreamingApplication
       dag.addStream("WordCounts", fileWordCount.outputPerFile, console.input);
     }
 
-    System.out.println("done with populateDAG, isDebugEnabled = " + LOG.isDebugEnabled());
+    LOG.info("done with populateDAG, isDebugEnabled = " + LOG.isDebugEnabled());
     LOG.info("Returning from populateDAG");
   }
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/FileWordCount.java
----------------------------------------------------------------------
diff --git a/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/FileWordCount.java b/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/FileWordCount.java
index 8eb004a..e8a91b2 100644
--- a/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/FileWordCount.java
+++ b/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/FileWordCount.java
@@ -28,11 +28,11 @@ import java.util.Map;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.InputPortFieldAnnotation;
 import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
-import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.common.util.BaseOperator;
 
 /**
@@ -169,14 +169,14 @@ public class FileWordCount extends BaseOperator
    * Output port for current file output
    */
   public final transient DefaultOutputPort<List<Map<String, Object>>>
-    outputPerFile = new DefaultOutputPort<>();
+      outputPerFile = new DefaultOutputPort<>();
 
   /**
    * Output port for global output
    */
   @OutputPortFieldAnnotation(optional = true)
   public final transient DefaultOutputPort<List<Map<String, Object>>>
-    outputGlobal = new DefaultOutputPort<>();
+      outputGlobal = new DefaultOutputPort<>();
 
   /**
    * Tuple is singleton map {@code fileName => TopNMap} where {@code TopNMap} is the final
@@ -184,12 +184,13 @@ public class FileWordCount extends BaseOperator
    * {@code endWindow()} call after an EOF
    */
   public final transient DefaultOutputPort<Map<String, Object>>
-    fileOutput = new DefaultOutputPort<>();
+      fileOutput = new DefaultOutputPort<>();
 
   /**
    * Get the number of top (word, frequency) pairs that will be output
    */
-  public int getTopN() {
+  public int getTopN()
+  {
     return topN;
   }
 
@@ -197,7 +198,8 @@ public class FileWordCount extends BaseOperator
    * Set the number of top (word, frequency) pairs that will be output
    * @param n The new number
    */
-  public void setTopN(int n) {
+  public void setTopN(int n)
+  {
     topN = n;
   }
 
@@ -250,8 +252,7 @@ public class FileWordCount extends BaseOperator
       return;
     }
 
-    LOG.info("FileWordCount: endWindow for {}, wordMapFile.size = {}, topN = {}",
-             fileName, wordMapFile.size(), topN);
+    LOG.info("FileWordCount: endWindow for {}, wordMapFile.size = {}, topN = {}", fileName, wordMapFile.size(), topN);
 
     // get topN list for this file and, if we have EOF, emit to fileOutput port
 
@@ -293,13 +294,15 @@ public class FileWordCount extends BaseOperator
     final ArrayList<WCPair> list = new ArrayList<>(map.values());
 
     // sort entries in descending order of frequency
-    Collections.sort(list, new Comparator<WCPair>() {
-        @Override
-        public int compare(WCPair o1, WCPair o2) {
-          return (int)(o2.freq - o1.freq);
-        }
+    Collections.sort(list, new Comparator<WCPair>()
+    {
+      @Override
+      public int compare(WCPair o1, WCPair o2)
+      {
+        return (int)(o2.freq - o1.freq);
+      }
     });
-  
+
     if (topN > 0) {
       list.subList(topN, map.size()).clear();      // retain only the first topN entries
     }
@@ -329,13 +332,15 @@ public class FileWordCount extends BaseOperator
     fileFinalList.addAll(map.values());
 
     // sort entries in descending order of frequency
-    Collections.sort(fileFinalList, new Comparator<WCPair>() {
-        @Override
-        public int compare(WCPair o1, WCPair o2) {
-          return (int)(o2.freq - o1.freq);
-        }
+    Collections.sort(fileFinalList, new Comparator<WCPair>()
+    {
+      @Override
+      public int compare(WCPair o1, WCPair o2)
+      {
+        return (int)(o2.freq - o1.freq);
+      }
     });
-  
+
     if (topN > 0) {
       fileFinalList.subList(topN, map.size()).clear();      // retain only the first topN entries
     }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/LineReader.java
----------------------------------------------------------------------
diff --git a/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/LineReader.java b/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/LineReader.java
index fe10d73..8a1a57b 100644
--- a/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/LineReader.java
+++ b/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/LineReader.java
@@ -23,13 +23,13 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 
-import org.apache.hadoop.fs.Path;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
+import org.apache.hadoop.fs.Path;
+
 import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
 import com.datatorrent.lib.io.fs.AbstractFileInputOperator;
 
 /**

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WCPair.java
----------------------------------------------------------------------
diff --git a/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WCPair.java b/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WCPair.java
index 1817f2b..bb67622 100644
--- a/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WCPair.java
+++ b/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WCPair.java
@@ -23,8 +23,8 @@ package com.datatorrent.demos.wordcount;
  *
  * @since 3.2.0
  */
-public class WCPair {
-
+public class WCPair
+{
   /**
    * The word
    */
@@ -38,20 +38,25 @@ public class WCPair {
   /**
    * Default constructor
    */
-  public WCPair() {}
+  public WCPair()
+  {
+
+  }
 
   /**
    * Create new object with given values
    * @param w The word
    * @param f The frequency
    */
-  public WCPair(String w, int f) {
+  public WCPair(String w, int f)
+  {
     word = w;
     freq = f;
   }
-  
+
   @Override
-  public String toString() {
+  public String toString()
+  {
     return String.format("(%s, %d)", word, freq);
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WindowWordCount.java
----------------------------------------------------------------------
diff --git a/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WindowWordCount.java b/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WindowWordCount.java
index 999f32f..0edfd1e 100644
--- a/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WindowWordCount.java
+++ b/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WindowWordCount.java
@@ -80,7 +80,9 @@ public class WindowWordCount extends BaseOperator
     LOG.info("WindowWordCount: endWindow");
 
     // got EOF; if no words found, do nothing
-    if (wordMap.isEmpty()) return;
+    if (wordMap.isEmpty()) {
+      return;
+    }
 
     // have some words; emit single map and reset for next file
     final ArrayList<WCPair> list = new ArrayList<>(wordMap.values());

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WordCountInputOperator.java
----------------------------------------------------------------------
diff --git a/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WordCountInputOperator.java b/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WordCountInputOperator.java
index 3ab20c6..3a88bab 100644
--- a/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WordCountInputOperator.java
+++ b/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WordCountInputOperator.java
@@ -18,84 +18,92 @@
  */
 package com.datatorrent.demos.wordcount;
 
-import com.datatorrent.lib.io.SimpleSinglePortInputOperator;
-
-import java.io.*;
+import java.io.BufferedReader;
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.lib.io.SimpleSinglePortInputOperator;
+
 /**
  * <p>WordCountInputOperator class.</p>
  *
  * @since 0.3.2
  */
-public class WordCountInputOperator extends SimpleSinglePortInputOperator<String> implements Runnable {
-
-    private static final Logger logger = LoggerFactory.getLogger(WordCountInputOperator.class);
-    protected long averageSleep = 300;
-    protected long sleepPlusMinus = 100;
-    protected String fileName = "com/datatorrent/demos/wordcount/samplefile.txt";
+public class WordCountInputOperator extends SimpleSinglePortInputOperator<String> implements Runnable
+{
 
-    public void setAverageSleep(long as) {
-        averageSleep = as;
-    }
+  private static final Logger logger = LoggerFactory.getLogger(WordCountInputOperator.class);
+  protected long averageSleep = 300;
+  protected long sleepPlusMinus = 100;
+  protected String fileName = "com/datatorrent/demos/wordcount/samplefile.txt";
 
-    public void setSleepPlusMinus(long spm) {
-        sleepPlusMinus = spm;
-    }
+  public void setAverageSleep(long as)
+  {
+    averageSleep = as;
+  }
 
-    public void setFileName(String fn) {
-        fileName = fn;
-    }
+  public void setSleepPlusMinus(long spm)
+  {
+    sleepPlusMinus = spm;
+  }
 
-    @Override
-    public void run() {
-        BufferedReader br = null;
-        DataInputStream in = null;
-        InputStream fstream = null;
+  public void setFileName(String fn)
+  {
+    fileName = fn;
+  }
 
-        while (true) {
-            try {
-                String line;
-                fstream = this.getClass().getClassLoader().getResourceAsStream(fileName);
+  @Override
+  public void run()
+  {
+    BufferedReader br = null;
+    DataInputStream in = null;
+    InputStream fstream = null;
 
-                in = new DataInputStream(fstream);
-                br = new BufferedReader(new InputStreamReader(in));
+    while (true) {
+      try {
+        String line;
+        fstream = this.getClass().getClassLoader().getResourceAsStream(fileName);
 
-                while ((line = br.readLine()) != null) {
-                    String[] words = line.trim().split("[\\p{Punct}\\s\\\"\\'\u201c\u201d]+");
-                    for (String word : words) {
-                        word = word.trim().toLowerCase();
-                        if (!word.isEmpty()) {
-                            //System.out.println("Sending "+word);
-                            outputPort.emit(word);
-                        }
-                    }
-                    try {
-                        Thread.sleep(averageSleep + (new Double(sleepPlusMinus * (Math.random() * 2 - 1))).longValue());
-                    } catch (InterruptedException ex) {
-                        // nothing
-                    }
-                }
+        in = new DataInputStream(fstream);
+        br = new BufferedReader(new InputStreamReader(in));
 
-            } catch (IOException ex) {
-                logger.debug(ex.toString());
-            } finally {
-                try {
-                    if (br != null) {
-                        br.close();
-                    }
-                    if (in != null) {
-                        in.close();
-                    }
-                    if (fstream != null) {
-                        fstream.close();
-                    }
-                } catch (IOException exc) {
-                    // nothing
-                }
+        while ((line = br.readLine()) != null) {
+          String[] words = line.trim().split("[\\p{Punct}\\s\\\"\\'\u201c\u201d]+");
+          for (String word : words) {
+            word = word.trim().toLowerCase();
+            if (!word.isEmpty()) {
+              outputPort.emit(word);
             }
+          }
+          try {
+            Thread.sleep(averageSleep + (new Double(sleepPlusMinus * (Math.random() * 2 - 1))).longValue());
+          } catch (InterruptedException ex) {
+            // nothing
+          }
+        }
+
+      } catch (IOException ex) {
+        logger.debug(ex.toString());
+      } finally {
+        try {
+          if (br != null) {
+            br.close();
+          }
+          if (in != null) {
+            in.close();
+          }
+          if (fstream != null) {
+            fstream.close();
+          }
+        } catch (IOException exc) {
+          // nothing
         }
+      }
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WordCountWriter.java
----------------------------------------------------------------------
diff --git a/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WordCountWriter.java b/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WordCountWriter.java
index 62c41d3..30aab10 100644
--- a/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WordCountWriter.java
+++ b/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WordCountWriter.java
@@ -40,7 +40,7 @@ public class WordCountWriter extends AbstractFileOutputOperator<Map<String, Obje
   private static final String nl = System.lineSeparator();
 
   private String fileName;    // current file name
-  private transient final StringBuilder sb = new StringBuilder();
+  private final transient StringBuilder sb = new StringBuilder();
 
   /**
    * {@inheritDoc}
@@ -83,15 +83,17 @@ public class WordCountWriter extends AbstractFileOutputOperator<Map<String, Obje
 
     // get first and only pair; key is the fileName and is ignored here
     final Map.Entry<String, Object> entry = tuple.entrySet().iterator().next();
-    final List<WCPair> list = (List<WCPair>) entry.getValue();
+    final List<WCPair> list = (List<WCPair>)entry.getValue();
 
     if (sb.length() > 0) {        // clear buffer
       sb.delete(0, sb.length());
     }
 
     for ( WCPair pair : list ) {
-      sb.append(pair.word); sb.append(" : ");
-      sb.append(pair.freq); sb.append(nl);
+      sb.append(pair.word);
+      sb.append(" : ");
+      sb.append(pair.freq);
+      sb.append(nl);
     }
 
     final String data = sb.toString();

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WordReader.java
----------------------------------------------------------------------
diff --git a/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WordReader.java b/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WordReader.java
index 56d9294..58c44b4 100644
--- a/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WordReader.java
+++ b/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/WordReader.java
@@ -46,8 +46,8 @@ public class WordReader extends BaseOperator
   /**
    * Input port on which lines from the current file are received
    */
-  public final transient DefaultInputPort<String>
-    input = new DefaultInputPort<String>() {
+  public final transient DefaultInputPort<String> input = new DefaultInputPort<String>()
+  {
 
     @Override
     public void process(String line)
@@ -55,7 +55,9 @@ public class WordReader extends BaseOperator
       // line; split it into words and emit them
       final String[] words = nonWord.split(line);
       for (String word : words) {
-        if (word.isEmpty()) continue;
+        if (word.isEmpty()) {
+          continue;
+        }
         output.emit(word);
       }
     }
@@ -65,7 +67,8 @@ public class WordReader extends BaseOperator
    * Returns the regular expression that matches strings between words
    * @return Regular expression for strings that separate words
    */
-  public String getNonWordStr() {
+  public String getNonWordStr()
+  {
     return nonWordStr;
   }
 
@@ -73,7 +76,8 @@ public class WordReader extends BaseOperator
    * Sets the regular expression that matches strings between words
    * @param regex New regular expression for strings that separate words
    */
-  public void setNonWordStr(String regex) {
+  public void setNonWordStr(String regex)
+  {
     nonWordStr = regex;
   }
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/wordcount/src/test/java/com/datatorrent/demos/wordcount/ApplicationTest.java
----------------------------------------------------------------------
diff --git a/demos/wordcount/src/test/java/com/datatorrent/demos/wordcount/ApplicationTest.java b/demos/wordcount/src/test/java/com/datatorrent/demos/wordcount/ApplicationTest.java
index 6fc0dfa..1df0459 100644
--- a/demos/wordcount/src/test/java/com/datatorrent/demos/wordcount/ApplicationTest.java
+++ b/demos/wordcount/src/test/java/com/datatorrent/demos/wordcount/ApplicationTest.java
@@ -18,16 +18,18 @@
  */
 package com.datatorrent.demos.wordcount;
 
-import com.datatorrent.api.LocalMode;
-import com.datatorrent.demos.wordcount.Application;
-import org.apache.hadoop.conf.Configuration;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hadoop.conf.Configuration;
+import com.datatorrent.api.LocalMode;
 
 /**
  *
  */
 public class ApplicationTest
 {
+  private final transient Logger LOG = LoggerFactory.getLogger(ApplicationTest.class);
   public ApplicationTest()
   {
   }
@@ -36,14 +38,14 @@ public class ApplicationTest
   public void testSomeMethod() throws Exception
   {
     LocalMode lma = LocalMode.newInstance();
-    Configuration conf =new Configuration(false);
+    Configuration conf = new Configuration(false);
     conf.addResource("dt-site-wordcount.xml");
     lma.prepareDAG(new Application(), conf);
     LocalMode.Controller lc = lma.getController();
     long start = System.currentTimeMillis();
     lc.run(300000);
     long end = System.currentTimeMillis();
-    long time = end -start;
-    System.out.println("Test used "+time+" ms");
+    long time = end - start;
+    LOG.debug("Test used " + time + " ms");
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/yahoofinance/src/main/java/com/datatorrent/demos/yahoofinance/ApplicationWithDerbySQL.java
----------------------------------------------------------------------
diff --git a/demos/yahoofinance/src/main/java/com/datatorrent/demos/yahoofinance/ApplicationWithDerbySQL.java b/demos/yahoofinance/src/main/java/com/datatorrent/demos/yahoofinance/ApplicationWithDerbySQL.java
index 1da5b6c..50b306d 100644
--- a/demos/yahoofinance/src/main/java/com/datatorrent/demos/yahoofinance/ApplicationWithDerbySQL.java
+++ b/demos/yahoofinance/src/main/java/com/datatorrent/demos/yahoofinance/ApplicationWithDerbySQL.java
@@ -18,28 +18,29 @@
  */
 package com.datatorrent.demos.yahoofinance;
 
-import com.datatorrent.api.StreamingApplication;
+import org.apache.hadoop.conf.Configuration;
+
 import com.datatorrent.api.DAG;
+import com.datatorrent.api.StreamingApplication;
 import com.datatorrent.api.annotation.ApplicationAnnotation;
 import com.datatorrent.lib.io.ConsoleOutputOperator;
 import com.datatorrent.lib.streamquery.AbstractSqlStreamOperator;
 import com.datatorrent.lib.streamquery.DerbySqlStreamOperator;
 
-import org.apache.hadoop.conf.Configuration;
-
 /**
  * This demo will output the stock market data from yahoo finance
  *
  * @since 0.3.2
  */
-@ApplicationAnnotation(name="YahooFinanceWithDerbySQLDemo")
+@ApplicationAnnotation(name = "YahooFinanceWithDerbySQLDemo")
 public class ApplicationWithDerbySQL implements StreamingApplication
 {
   @Override
-  public void populateDAG(DAG dag, Configuration conf) {
-      String symbolStr = conf.get(ApplicationWithDerbySQL.class.getName() + ".tickerSymbols", "YHOO,GOOG,AAPL,FB,AMZN,NFLX,IBM");
+  public void populateDAG(DAG dag, Configuration conf)
+  {
+    String symbolStr = conf.get(ApplicationWithDerbySQL.class.getName() + ".tickerSymbols", "YHOO,GOOG,AAPL,FB,AMZN,NFLX,IBM");
 
-      String[] symbols = symbolStr.split(",");
+    String[] symbols = symbolStr.split(",");
 
     YahooFinanceCSVInputOperator input1 = dag.addOperator("input1", new YahooFinanceCSVInputOperator());
     YahooFinanceCSVInputOperator input2 = dag.addOperator("input2", new YahooFinanceCSVInputOperator());

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/yahoofinance/src/main/java/com/datatorrent/demos/yahoofinance/StockTickInput.java
----------------------------------------------------------------------
diff --git a/demos/yahoofinance/src/main/java/com/datatorrent/demos/yahoofinance/StockTickInput.java b/demos/yahoofinance/src/main/java/com/datatorrent/demos/yahoofinance/StockTickInput.java
index b658575..01e3ce9 100644
--- a/demos/yahoofinance/src/main/java/com/datatorrent/demos/yahoofinance/StockTickInput.java
+++ b/demos/yahoofinance/src/main/java/com/datatorrent/demos/yahoofinance/StockTickInput.java
@@ -18,26 +18,33 @@
  */
 package com.datatorrent.demos.yahoofinance;
 
-import au.com.bytecode.opencsv.CSVReader;
-
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.InputOperator;
-import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
-import com.datatorrent.lib.util.KeyValPair;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+
 import javax.validation.constraints.NotNull;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import org.apache.commons.httpclient.HttpClient;
 import org.apache.commons.httpclient.HttpStatus;
 import org.apache.commons.httpclient.cookie.CookiePolicy;
 import org.apache.commons.httpclient.methods.GetMethod;
 import org.apache.commons.httpclient.params.DefaultHttpParams;
 import org.apache.hadoop.util.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.InputOperator;
+import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
+import com.datatorrent.lib.util.KeyValPair;
+
+import au.com.bytecode.opencsv.CSVReader;
 
 /**
  * This operator sends price, volume and time into separate ports and calculates incremental volume.
@@ -86,14 +93,14 @@ public class StockTickInput implements InputOperator
   private String prepareURL()
   {
     String str = "http://download.finance.yahoo.com/d/quotes.csv?s=";
-      for (int i = 0; i < symbols.length; i++) {
-        if (i != 0) {
-          str += ",";
-        }
-        str += symbols[i];
+    for (int i = 0; i < symbols.length; i++) {
+      if (i != 0) {
+        str += ",";
       }
-      str += "&f=sl1vt1&e=.csv";
-      return str;
+      str += symbols[i];
+    }
+    str += "&f=sl1vt1&e=.csv";
+    return str;
   }
 
   @Override
@@ -118,8 +125,7 @@ public class StockTickInput implements InputOperator
       int statusCode = client.executeMethod(method);
       if (statusCode != HttpStatus.SC_OK) {
         logger.error("Method failed: " + method.getStatusLine());
-      }
-      else {
+      } else {
         InputStream istream = method.getResponseBodyAsStream();
         // Process response
         InputStreamReader isr = new InputStreamReader(istream);
@@ -150,11 +156,9 @@ public class StockTickInput implements InputOperator
         }
       }
       Thread.sleep(readIntervalMillis);
-    }
-    catch (InterruptedException ex) {
+    } catch (InterruptedException ex) {
       logger.debug(ex.toString());
-    }
-    catch (IOException ex) {
+    } catch (IOException ex) {
       logger.debug(ex.toString());
     }
   }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/yahoofinance/src/main/java/com/datatorrent/demos/yahoofinance/YahooFinanceApplication.java
----------------------------------------------------------------------
diff --git a/demos/yahoofinance/src/main/java/com/datatorrent/demos/yahoofinance/YahooFinanceApplication.java b/demos/yahoofinance/src/main/java/com/datatorrent/demos/yahoofinance/YahooFinanceApplication.java
index debf91d..a6aaece 100644
--- a/demos/yahoofinance/src/main/java/com/datatorrent/demos/yahoofinance/YahooFinanceApplication.java
+++ b/demos/yahoofinance/src/main/java/com/datatorrent/demos/yahoofinance/YahooFinanceApplication.java
@@ -18,7 +18,7 @@
  */
 package com.datatorrent.demos.yahoofinance;
 
-
+import org.apache.hadoop.conf.Configuration;
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.Context.PortContext;
 import com.datatorrent.api.DAG;
@@ -32,7 +32,6 @@ import com.datatorrent.lib.multiwindow.SimpleMovingAverage;
 import com.datatorrent.lib.stream.ConsolidatorKeyVal;
 import com.datatorrent.lib.util.BaseKeyValueOperator.DefaultPartitionCodec;
 import com.datatorrent.lib.util.HighLow;
-import org.apache.hadoop.conf.Configuration;
 
 /**
  * Yahoo! Finance Application Demo :<br>
@@ -191,7 +190,7 @@ import org.apache.hadoop.conf.Configuration;
  *
  * @since 0.3.2
  */
-@ApplicationAnnotation(name="YahooFinanceDemo")
+@ApplicationAnnotation(name = "YahooFinanceDemo")
 public class YahooFinanceApplication implements StreamingApplication
 {
   protected int streamingWindowSizeMilliSeconds = 1000; // 1 second
@@ -329,8 +328,8 @@ public class YahooFinanceApplication implements StreamingApplication
   /**
    * Populate Yahoo Finance Demo Application DAG.
    */
-	@SuppressWarnings("unchecked")
-	@Override
+  @SuppressWarnings("unchecked")
+  @Override
   public void populateDAG(DAG dag, Configuration conf)
   {
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/yahoofinance/src/main/java/com/datatorrent/demos/yahoofinance/YahooFinanceCSVInputOperator.java
----------------------------------------------------------------------
diff --git a/demos/yahoofinance/src/main/java/com/datatorrent/demos/yahoofinance/YahooFinanceCSVInputOperator.java b/demos/yahoofinance/src/main/java/com/datatorrent/demos/yahoofinance/YahooFinanceCSVInputOperator.java
index 0cdbfbb..cf3801e 100644
--- a/demos/yahoofinance/src/main/java/com/datatorrent/demos/yahoofinance/YahooFinanceCSVInputOperator.java
+++ b/demos/yahoofinance/src/main/java/com/datatorrent/demos/yahoofinance/YahooFinanceCSVInputOperator.java
@@ -18,23 +18,26 @@
  */
 package com.datatorrent.demos.yahoofinance;
 
-import au.com.bytecode.opencsv.CSVReader;
-
-import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.lib.io.SimpleSinglePortInputOperator;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import org.apache.commons.httpclient.HttpClient;
 import org.apache.commons.httpclient.HttpStatus;
 import org.apache.commons.httpclient.cookie.CookiePolicy;
 import org.apache.commons.httpclient.methods.GetMethod;
 import org.apache.commons.httpclient.params.DefaultHttpParams;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.lib.io.SimpleSinglePortInputOperator;
+
+import au.com.bytecode.opencsv.CSVReader;
 
 /**
  * Grabs Yahoo Finance quotes data and emits HashMap, with key equals the format name (e.g. "s0") <p>
@@ -103,7 +106,7 @@ public class YahooFinanceCSVInputOperator extends SimpleSinglePortInputOperator<
       if (i != 0) {
         str += ",";
       }
-       str += symbolList.get(i);
+      str += symbolList.get(i);
     }
     str += "&f=";
     for (String format: parameterList) {
@@ -129,9 +132,8 @@ public class YahooFinanceCSVInputOperator extends SimpleSinglePortInputOperator<
       try {
         int statusCode = client.executeMethod(method);
         if (statusCode != HttpStatus.SC_OK) {
-          System.err.println("Method failed: " + method.getStatusLine());
-        }
-        else {
+          logger.error("Method failed: " + method.getStatusLine());
+        } else {
           InputStream istream;
           istream = method.getResponseBodyAsStream();
           // Process response
@@ -148,11 +150,9 @@ public class YahooFinanceCSVInputOperator extends SimpleSinglePortInputOperator<
           }
         }
         Thread.sleep(readIntervalMillis);
-      }
-      catch (InterruptedException ex) {
+      } catch (InterruptedException ex) {
         logger.debug(ex.toString());
-      }
-      catch (IOException ex) {
+      } catch (IOException ex) {
         logger.debug(ex.toString());
       }
     }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/yahoofinance/src/test/java/com/datatorrent/demos/yahoofinance/ApplicationTest.java
----------------------------------------------------------------------
diff --git a/demos/yahoofinance/src/test/java/com/datatorrent/demos/yahoofinance/ApplicationTest.java b/demos/yahoofinance/src/test/java/com/datatorrent/demos/yahoofinance/ApplicationTest.java
index 587f9de..c038e61 100644
--- a/demos/yahoofinance/src/test/java/com/datatorrent/demos/yahoofinance/ApplicationTest.java
+++ b/demos/yahoofinance/src/test/java/com/datatorrent/demos/yahoofinance/ApplicationTest.java
@@ -18,10 +18,9 @@
  */
 package com.datatorrent.demos.yahoofinance;
 
-import com.datatorrent.api.LocalMode;
-import com.datatorrent.demos.yahoofinance.YahooFinanceApplication;
-import org.apache.hadoop.conf.Configuration;
 import org.junit.Test;
+import org.apache.hadoop.conf.Configuration;
+import com.datatorrent.api.LocalMode;
 
 /**
  * Run Yahoo Finance application demo.

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/yahoofinance/src/test/java/com/datatorrent/demos/yahoofinance/ApplicationWithDerbySQLTest.java
----------------------------------------------------------------------
diff --git a/demos/yahoofinance/src/test/java/com/datatorrent/demos/yahoofinance/ApplicationWithDerbySQLTest.java b/demos/yahoofinance/src/test/java/com/datatorrent/demos/yahoofinance/ApplicationWithDerbySQLTest.java
index b430f92..7b134f5 100644
--- a/demos/yahoofinance/src/test/java/com/datatorrent/demos/yahoofinance/ApplicationWithDerbySQLTest.java
+++ b/demos/yahoofinance/src/test/java/com/datatorrent/demos/yahoofinance/ApplicationWithDerbySQLTest.java
@@ -18,16 +18,18 @@
  */
 package com.datatorrent.demos.yahoofinance;
 
-import com.datatorrent.api.LocalMode;
-import com.datatorrent.demos.yahoofinance.ApplicationWithDerbySQL;
-import org.apache.hadoop.conf.Configuration;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hadoop.conf.Configuration;
+import com.datatorrent.api.LocalMode;
 
 /**
  *
  */
 public class ApplicationWithDerbySQLTest
 {
+  private final transient Logger LOG = LoggerFactory.getLogger(ApplicationWithDerbySQLTest.class);
   public ApplicationWithDerbySQLTest()
   {
   }
@@ -42,7 +44,7 @@ public class ApplicationWithDerbySQLTest
     long start = System.currentTimeMillis();
     lc.run();
     long end = System.currentTimeMillis();
-    long time = end -start;
-    System.out.println("Test used "+time+" ms");
+    long time = end - start;
+    LOG.debug("Test used " + time + " ms");
   }
 }


[3/6] apex-malhar git commit: Fixed checkstyle errors for demos.

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/operator/CalculatorOperator.java
----------------------------------------------------------------------
diff --git a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/operator/CalculatorOperator.java b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/operator/CalculatorOperator.java
index d140f77..8f68dab 100644
--- a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/operator/CalculatorOperator.java
+++ b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/operator/CalculatorOperator.java
@@ -20,8 +20,6 @@ package com.datatorrent.demos.machinedata.operator;
 
 import java.io.Serializable;
 import java.math.BigDecimal;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -33,14 +31,16 @@ import com.google.common.base.Objects;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
-import com.datatorrent.lib.codec.KryoSerializableStreamCodec;
-import com.datatorrent.lib.util.KeyValPair;
-
-import com.datatorrent.api.*;
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.StreamCodec;
 import com.datatorrent.common.util.BaseOperator;
-
-import com.datatorrent.demos.machinedata.data.*;
+import com.datatorrent.demos.machinedata.data.MachineInfo;
+import com.datatorrent.demos.machinedata.data.MachineKey;
+import com.datatorrent.demos.machinedata.data.ResourceType;
 import com.datatorrent.demos.machinedata.util.DataTable;
+import com.datatorrent.lib.codec.KryoSerializableStreamCodec;
+import com.datatorrent.lib.util.KeyValPair;
 
 /**
  * <p>
@@ -179,13 +179,12 @@ public class CalculatorOperator extends BaseOperator
   {
 
     double val = (kthPercentile * sorted.size()) / 100.0;
-    if (val == (int) val) {
+    if (val == (int)val) {
       // Whole number
-      int idx = (int) val - 1;
+      int idx = (int)val - 1;
       return (sorted.get(idx) + sorted.get(idx + 1)) / 2.0;
-    }
-    else {
-      int idx = (int) Math.round(val) - 1;
+    } else {
+      int idx = (int)Math.round(val) - 1;
       return sorted.get(idx);
     }
   }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/operator/MachineInfoAveragingOperator.java
----------------------------------------------------------------------
diff --git a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/operator/MachineInfoAveragingOperator.java b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/operator/MachineInfoAveragingOperator.java
index 29f700f..bbfd547 100644
--- a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/operator/MachineInfoAveragingOperator.java
+++ b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/operator/MachineInfoAveragingOperator.java
@@ -18,18 +18,6 @@
  */
 package com.datatorrent.demos.machinedata.operator;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.DefaultOutputPort;
-
-import com.datatorrent.demos.machinedata.data.MachineInfo;
-import com.datatorrent.demos.machinedata.data.MachineKey;
-import com.datatorrent.demos.machinedata.data.AverageData;
-import com.datatorrent.lib.util.KeyHashValPair;
-import com.datatorrent.lib.util.KeyValPair;
-
-import com.google.common.collect.Maps;
-
 import java.math.BigDecimal;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
@@ -38,6 +26,18 @@ import java.util.Date;
 import java.util.HashMap;
 import java.util.Map;
 
+import com.google.common.collect.Maps;
+
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
+
+import com.datatorrent.demos.machinedata.data.AverageData;
+import com.datatorrent.demos.machinedata.data.MachineInfo;
+import com.datatorrent.demos.machinedata.data.MachineKey;
+import com.datatorrent.lib.util.KeyHashValPair;
+import com.datatorrent.lib.util.KeyValPair;
+
 /**
  * This class calculates the average for various resources across different devices for a given key
  * <p>MachineInfoAveragingOperator class.</p>
@@ -184,7 +184,7 @@ public class MachineInfoAveragingOperator extends BaseOperator
   {
     StringBuilder sb = new StringBuilder();
     if (key instanceof MachineKey) {
-      MachineKey mkey = (MachineKey) key;
+      MachineKey mkey = (MachineKey)key;
       Integer customer = mkey.getCustomer();
       if (customer != null) {
         sb.append("customer: " + customer + "\n");

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/operator/MachineInfoAveragingPrerequisitesOperator.java
----------------------------------------------------------------------
diff --git a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/operator/MachineInfoAveragingPrerequisitesOperator.java b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/operator/MachineInfoAveragingPrerequisitesOperator.java
index 15e6f07..cb5fa5a 100644
--- a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/operator/MachineInfoAveragingPrerequisitesOperator.java
+++ b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/operator/MachineInfoAveragingPrerequisitesOperator.java
@@ -18,19 +18,18 @@
  */
 package com.datatorrent.demos.machinedata.operator;
 
-import com.datatorrent.common.util.BaseOperator;
+import java.util.HashMap;
+import java.util.Map;
+
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 
-import com.datatorrent.demos.machinedata.data.MachineKey;
-import com.datatorrent.demos.machinedata.data.MachineInfo;
+import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.demos.machinedata.data.AverageData;
+import com.datatorrent.demos.machinedata.data.MachineInfo;
+import com.datatorrent.demos.machinedata.data.MachineKey;
 import com.datatorrent.lib.util.KeyHashValPair;
 
-
-import java.util.HashMap;
-import java.util.Map;
-
 /**
  * This class calculates the partial sum and count for tuples generated by upstream Operator
  * <p> MachineInfoAveragingPrerequisitesOperator class. </p>
@@ -51,8 +50,6 @@ public class MachineInfoAveragingPrerequisitesOperator extends BaseOperator
       MachineInfoAveragingUnifier unifier = new MachineInfoAveragingUnifier();
       return unifier;
     }
-
-    ;
   };
 
   public transient DefaultInputPort<MachineInfo> inputPort = new DefaultInputPort<MachineInfo>()
@@ -66,8 +63,7 @@ public class MachineInfoAveragingPrerequisitesOperator extends BaseOperator
       if (averageData == null) {
         averageData = new AverageData(tuple.getCpu(), tuple.getHdd(), tuple.getRam(), 1);
         sums.put(key, averageData);
-      }
-      else {
+      } else {
         averageData.setCpu(averageData.getCpu() + tuple.getCpu());
         averageData.setRam(averageData.getRam() + tuple.getRam());
         averageData.setHdd(averageData.getHdd() + tuple.getHdd());

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/operator/MachineInfoAveragingUnifier.java
----------------------------------------------------------------------
diff --git a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/operator/MachineInfoAveragingUnifier.java b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/operator/MachineInfoAveragingUnifier.java
index 40995b2..e0b67f3 100644
--- a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/operator/MachineInfoAveragingUnifier.java
+++ b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/operator/MachineInfoAveragingUnifier.java
@@ -21,8 +21,8 @@ package com.datatorrent.demos.machinedata.operator;
 import java.util.HashMap;
 import java.util.Map;
 
-import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Operator.Unifier;
 
 import com.datatorrent.demos.machinedata.data.AverageData;
@@ -80,8 +80,7 @@ public class MachineInfoAveragingUnifier implements Unifier<KeyHashValPair<Machi
     AverageData tupleValue = arg0.getValue();
     if (averageData == null) {
       sums.put(tupleKey, tupleValue);
-    }
-    else {
+    } else {
       averageData.setCpu(averageData.getCpu() + tupleValue.getCpu());
       averageData.setRam(averageData.getRam() + tupleValue.getRam());
       averageData.setHdd(averageData.getHdd() + tupleValue.getHdd());

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/util/Combinatorics.java
----------------------------------------------------------------------
diff --git a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/util/Combinatorics.java b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/util/Combinatorics.java
index 88ee35d..6c4256a 100644
--- a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/util/Combinatorics.java
+++ b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/util/Combinatorics.java
@@ -18,7 +18,11 @@
  */
 package com.datatorrent.demos.machinedata.util;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 /**
  * Generate combinations of elements for the given array of elements.
@@ -27,66 +31,71 @@ import java.util.*;
  *
  * @since 0.3.5
  */
-public class Combinatorics<T> {
+public class Combinatorics<T>
+{
 
-    private T[] values;
-    private int size = -1;
-    private List<T> result;
-    private Map<Integer, List<T>> resultMap = new HashMap<Integer, List<T>>();
-    private int resultMapSize = 0;
+  private T[] values;
+  private int size = -1;
+  private List<T> result;
+  private Map<Integer, List<T>> resultMap = new HashMap<Integer, List<T>>();
+  private int resultMapSize = 0;
 
-    /**
-     * Generates all possible combinations with all the sizes.
-     *
-     * @param values
-     */
-    public Combinatorics(T[] values) {
-        this.values = values;
-        this.size = -1;
-        this.result = new ArrayList<>();
-    }
+  /**
+   * Generates all possible combinations with all the sizes.
+   *
+   * @param values
+   */
+  public Combinatorics(T[] values)
+  {
+    this.values = values;
+    this.size = -1;
+    this.result = new ArrayList<>();
+  }
 
-    /**
-     * Generates all possible combinations with the given size.
-     *
-     * @param values
-     * @param size
-     */
-    public Combinatorics(T[] values, int size) {
-        this.values = values;
-        this.size = size;
-        this.result = new ArrayList<>();
-    }
+  /**
+   * Generates all possible combinations with the given size.
+   *
+   * @param values
+   * @param size
+   */
+  public Combinatorics(T[] values, int size)
+  {
+    this.values = values;
+    this.size = size;
+    this.result = new ArrayList<>();
+  }
 
-    public Map<Integer, List<T>> generate() {
+  public Map<Integer, List<T>> generate()
+  {
 
-        if (size == -1) {
-            size = values.length;
-            for (int i = 1; i <= size; i++) {
-                int[] tmp = new int[i];
-                Arrays.fill(tmp, -1);
-                generateCombinations(0, 0, tmp);
-            }
-        } else {
-            int[] tmp = new int[size];
-            Arrays.fill(tmp, -1);
-            generateCombinations(0, 0, tmp);
-        }
-        return resultMap;
+    if (size == -1) {
+      size = values.length;
+      for (int i = 1; i <= size; i++) {
+        int[] tmp = new int[i];
+        Arrays.fill(tmp, -1);
+        generateCombinations(0, 0, tmp);
+      }
+    } else {
+      int[] tmp = new int[size];
+      Arrays.fill(tmp, -1);
+      generateCombinations(0, 0, tmp);
     }
+    return resultMap;
+  }
 
-    public void generateCombinations(int start, int depth, int[] tmp) {
-        if (depth == tmp.length) {
-            for (int j = 0; j < depth; j++) {
-                result.add(values[tmp[j]]);
-            }
-            resultMap.put(++resultMapSize, result);
-            result = new ArrayList<>();
-            return;
-        }
-        for (int i = start; i < values.length; i++) {
-            tmp[depth] = i;
-            generateCombinations(i + 1, depth + 1, tmp);
-        }
+  public void generateCombinations(int start, int depth, int[] tmp)
+  {
+    if (depth == tmp.length) {
+      for (int j = 0; j < depth; j++) {
+        result.add(values[tmp[j]]);
+      }
+      resultMap.put(++resultMapSize, result);
+      result = new ArrayList<>();
+      return;
+    }
+    for (int i = start; i < values.length; i++) {
+      tmp[depth] = i;
+      generateCombinations(i + 1, depth + 1, tmp);
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/util/DataTable.java
----------------------------------------------------------------------
diff --git a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/util/DataTable.java b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/util/DataTable.java
index 8820400..f8f2d33 100644
--- a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/util/DataTable.java
+++ b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/util/DataTable.java
@@ -30,36 +30,46 @@ import com.google.common.collect.Maps;
  *
  * @since 0.3.5
  */
-public class DataTable<R,C,E>   {
+public class DataTable<R,C,E>
+{
 
-	//machineKey, [cpu,ram,hdd] -> value
-	private final Map<R,Map<C,E>> table= Maps.newHashMap();
+  //machineKey, [cpu,ram,hdd] -> value
+  private final Map<R,Map<C,E>> table = Maps.newHashMap();
 
-	public boolean containsRow(R rowKey){
-		return table.containsKey(rowKey);
-	}
+  public boolean containsRow(R rowKey)
+  {
+    return table.containsKey(rowKey);
+  }
 
-	public void put(R rowKey,C colKey, E entry){
-		if(!containsRow(rowKey)){
-			table.put(rowKey, Maps.<C,E>newHashMap());
-		}
-		table.get(rowKey).put(colKey, entry);
-	}
+  public void put(R rowKey,C colKey, E entry)
+  {
+    if (!containsRow(rowKey)) {
+      table.put(rowKey, Maps.<C,E>newHashMap());
+    }
+    table.get(rowKey).put(colKey, entry);
+  }
 
-	@Nullable public E get(R rowKey, C colKey){
-		if(!containsRow(rowKey)) return null;
-		return table.get(rowKey).get(colKey);
-	}
+  @Nullable
+  public E get(R rowKey, C colKey)
+  {
+    if (!containsRow(rowKey)) {
+      return null;
+    }
+    return table.get(rowKey).get(colKey);
+  }
 
-	public Set<R> rowKeySet(){
-		return table.keySet();
-	}
+  public Set<R> rowKeySet()
+  {
+    return table.keySet();
+  }
 
-	public void clear(){
-		table.clear();
-	}
+  public void clear()
+  {
+    table.clear();
+  }
 
-	public Map<R,Map<C,E>> getTable(){
-		return table;
-	}
+  public Map<R,Map<C,E>> getTable()
+  {
+    return table;
+  }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/machinedata/src/test/java/com/datatorrent/demos/machinedata/CalculatorOperatorTest.java
----------------------------------------------------------------------
diff --git a/demos/machinedata/src/test/java/com/datatorrent/demos/machinedata/CalculatorOperatorTest.java b/demos/machinedata/src/test/java/com/datatorrent/demos/machinedata/CalculatorOperatorTest.java
index 1a26bd1..0e397be 100644
--- a/demos/machinedata/src/test/java/com/datatorrent/demos/machinedata/CalculatorOperatorTest.java
+++ b/demos/machinedata/src/test/java/com/datatorrent/demos/machinedata/CalculatorOperatorTest.java
@@ -18,6 +18,20 @@
  */
 package com.datatorrent.demos.machinedata;
 
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+
+import org.junit.Assert;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.ImmutableList;
+
 import com.datatorrent.demos.machinedata.data.MachineInfo;
 import com.datatorrent.demos.machinedata.data.MachineKey;
 import com.datatorrent.demos.machinedata.data.ResourceType;
@@ -26,20 +40,6 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
 import com.datatorrent.lib.util.KeyValPair;
 import com.datatorrent.lib.util.TimeBucketKey;
 
-import com.google.common.collect.ImmutableList;
-
-import org.junit.Assert;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.List;
-import java.util.Map;
-
 /**
  * @since 0.3.5
  */
@@ -94,7 +94,7 @@ public class CalculatorOperatorTest
     Assert.assertEquals("number emitted tuples", 1, sortSink.collectedTuples.size());
     for (Object o : sortSink.collectedTuples) {
       LOG.debug(o.toString());
-      KeyValPair<TimeBucketKey, Map<ResourceType, Double>> keyValPair = (KeyValPair<TimeBucketKey, Map<ResourceType, Double>>) o;
+      KeyValPair<TimeBucketKey, Map<ResourceType, Double>> keyValPair = (KeyValPair<TimeBucketKey, Map<ResourceType, Double>>)o;
       Assert.assertEquals("emitted value for 'cpu' was ", 2.0, keyValPair.getValue().get(ResourceType.CPU), 0);
       Assert.assertEquals("emitted value for 'hdd' was ", 1.0, keyValPair.getValue().get(ResourceType.HDD), 0);
       Assert.assertEquals("emitted value for 'ram' was ", 1.0, keyValPair.getValue().get(ResourceType.RAM), 0);
@@ -132,7 +132,7 @@ public class CalculatorOperatorTest
     Assert.assertEquals("number emitted tuples", 1, sortSink.collectedTuples.size());
     for (Object o : sortSink.collectedTuples) {
       LOG.debug(o.toString());
-      KeyValPair<TimeBucketKey, Map<ResourceType, Double>> keyValPair = (KeyValPair<TimeBucketKey, Map<ResourceType, Double>>) o;
+      KeyValPair<TimeBucketKey, Map<ResourceType, Double>> keyValPair = (KeyValPair<TimeBucketKey, Map<ResourceType, Double>>)o;
       Assert.assertEquals("emitted value for 'cpu' was ", getSD(ImmutableList.of(1, 2, 3)), keyValPair.getValue().get(ResourceType.CPU), 0);
       Assert.assertEquals("emitted value for 'hdd' was ", getSD(ImmutableList.of(1, 1, 1)), keyValPair.getValue().get(ResourceType.HDD), 0);
       Assert.assertEquals("emitted value for 'ram' was ", getSD(ImmutableList.of(1, 1, 1)), keyValPair.getValue().get(ResourceType.RAM), 0);
@@ -184,7 +184,7 @@ public class CalculatorOperatorTest
     Assert.assertEquals("number emitted tuples", 1, sortSink.collectedTuples.size());
     for (Object o : sortSink.collectedTuples) {
       LOG.debug(o.toString());
-      KeyValPair<TimeBucketKey, Map<ResourceType, Double>> keyValPair = (KeyValPair<TimeBucketKey, Map<ResourceType, Double>>) o;
+      KeyValPair<TimeBucketKey, Map<ResourceType, Double>> keyValPair = (KeyValPair<TimeBucketKey, Map<ResourceType, Double>>)o;
       Assert.assertEquals("emitted value for 'cpu' was ", 3, keyValPair.getValue().get(ResourceType.CPU), 0);
       Assert.assertEquals("emitted value for 'hdd' was ", 1, keyValPair.getValue().get(ResourceType.HDD), 0);
       Assert.assertEquals("emitted value for 'ram' was ", 1, keyValPair.getValue().get(ResourceType.RAM), 0);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mobile/src/main/java/com/datatorrent/demos/mobile/Application.java
----------------------------------------------------------------------
diff --git a/demos/mobile/src/main/java/com/datatorrent/demos/mobile/Application.java b/demos/mobile/src/main/java/com/datatorrent/demos/mobile/Application.java
index 9d9f31b..30d7281 100644
--- a/demos/mobile/src/main/java/com/datatorrent/demos/mobile/Application.java
+++ b/demos/mobile/src/main/java/com/datatorrent/demos/mobile/Application.java
@@ -18,6 +18,18 @@
  */
 package com.datatorrent.demos.mobile;
 
+import java.net.URI;
+import java.util.Arrays;
+import java.util.Map;
+import java.util.Random;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.commons.lang.mutable.MutableLong;
+import org.apache.commons.lang3.Range;
+import org.apache.hadoop.conf.Configuration;
+
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.StatsListener;
@@ -28,16 +40,6 @@ import com.datatorrent.lib.io.PubSubWebSocketInputOperator;
 import com.datatorrent.lib.io.PubSubWebSocketOutputOperator;
 import com.datatorrent.lib.partitioner.StatelessThroughputBasedPartitioner;
 import com.datatorrent.lib.testbench.RandomEventGenerator;
-import org.apache.commons.lang.mutable.MutableLong;
-import org.apache.commons.lang3.Range;
-import org.apache.hadoop.conf.Configuration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.net.URI;
-import java.util.Arrays;
-import java.util.Map;
-import java.util.Random;
 
 /**
  * Mobile Demo Application:

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mobile/src/main/java/com/datatorrent/demos/mobile/PhoneEntryOperator.java
----------------------------------------------------------------------
diff --git a/demos/mobile/src/main/java/com/datatorrent/demos/mobile/PhoneEntryOperator.java b/demos/mobile/src/main/java/com/datatorrent/demos/mobile/PhoneEntryOperator.java
index 3b1e49d..8964d84 100644
--- a/demos/mobile/src/main/java/com/datatorrent/demos/mobile/PhoneEntryOperator.java
+++ b/demos/mobile/src/main/java/com/datatorrent/demos/mobile/PhoneEntryOperator.java
@@ -18,18 +18,20 @@
  */
 package com.datatorrent.demos.mobile;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.annotation.InputPortFieldAnnotation;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Range;
+import java.util.Map;
+import java.util.Random;
+import javax.validation.constraints.Min;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.validation.constraints.Min;
-import java.util.Map;
-import java.util.Random;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Range;
+
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.annotation.InputPortFieldAnnotation;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * Generates mobile numbers that will be displayed in mobile demo just after launch.<br></br>
@@ -99,7 +101,8 @@ public class PhoneEntryOperator extends BaseOperator
   public final transient DefaultOutputPort<Map<String, String>> seedPhones = new DefaultOutputPort<Map<String, String>>();
 
   @Override
-  public void beginWindow(long windowId){
+  public void beginWindow(long windowId)
+  {
     if (!seedGenerationDone) {
       Random random = new Random();
       int maxPhone = (maxSeedPhoneNumber <= rangeUpperEndpoint && maxSeedPhoneNumber >= rangeLowerEndpoint) ? maxSeedPhoneNumber : rangeUpperEndpoint;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mobile/src/main/java/com/datatorrent/demos/mobile/PhoneMovementGenerator.java
----------------------------------------------------------------------
diff --git a/demos/mobile/src/main/java/com/datatorrent/demos/mobile/PhoneMovementGenerator.java b/demos/mobile/src/main/java/com/datatorrent/demos/mobile/PhoneMovementGenerator.java
index 8db74cd..a46e6d4 100644
--- a/demos/mobile/src/main/java/com/datatorrent/demos/mobile/PhoneMovementGenerator.java
+++ b/demos/mobile/src/main/java/com/datatorrent/demos/mobile/PhoneMovementGenerator.java
@@ -25,20 +25,20 @@ import java.util.Set;
 
 import javax.validation.constraints.Min;
 
-import org.apache.commons.lang.mutable.MutableLong;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.commons.lang.mutable.MutableLong;
+
 import com.google.common.base.Strings;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.InputPortFieldAnnotation;
-
+import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.lib.counters.BasicCounters;
 import com.datatorrent.lib.util.HighLow;
 
@@ -73,8 +73,7 @@ public class PhoneMovementGenerator extends BaseOperator
       if (delta >= threshold) {
         if (state < 2) {
           xloc++;
-        }
-        else {
+        } else {
           xloc--;
         }
         if (xloc < 0) {
@@ -85,8 +84,7 @@ public class PhoneMovementGenerator extends BaseOperator
       if (delta >= threshold) {
         if ((state == 1) || (state == 3)) {
           yloc++;
-        }
-        else {
+        } else {
           yloc--;
         }
         if (yloc < 0) {
@@ -100,8 +98,7 @@ public class PhoneMovementGenerator extends BaseOperator
       HighLow<Integer> nloc = newgps.get(tuple);
       if (nloc == null) {
         newgps.put(tuple, new HighLow<Integer>(xloc, yloc));
-      }
-      else {
+      } else {
         nloc.setHigh(xloc);
         nloc.setLow(yloc);
       }
@@ -109,7 +106,7 @@ public class PhoneMovementGenerator extends BaseOperator
     }
   };
 
-  @InputPortFieldAnnotation(optional=true)
+  @InputPortFieldAnnotation(optional = true)
   public final transient DefaultInputPort<Map<String,String>> phoneQuery = new DefaultInputPort<Map<String,String>>()
   {
     @Override
@@ -120,19 +117,16 @@ public class PhoneMovementGenerator extends BaseOperator
       if (command != null) {
         if (command.equals(COMMAND_ADD)) {
           commandCounters.getCounter(CommandCounters.ADD).increment();
-          String phoneStr= tuple.get(KEY_PHONE);
+          String phoneStr = tuple.get(KEY_PHONE);
           registerPhone(phoneStr);
-        }
-        else if (command.equals(COMMAND_ADD_RANGE)) {
+        } else if (command.equals(COMMAND_ADD_RANGE)) {
           commandCounters.getCounter(CommandCounters.ADD_RANGE).increment();
           registerPhoneRange(tuple.get(KEY_START_PHONE), tuple.get(KEY_END_PHONE));
-        }
-        else if (command.equals(COMMAND_DELETE)) {
+        } else if (command.equals(COMMAND_DELETE)) {
           commandCounters.getCounter(CommandCounters.DELETE).increment();
-          String phoneStr= tuple.get(KEY_PHONE);
+          String phoneStr = tuple.get(KEY_PHONE);
           deregisterPhone(phoneStr);
-        }
-        else if (command.equals(COMMAND_CLEAR)) {
+        } else if (command.equals(COMMAND_CLEAR)) {
           commandCounters.getCounter(CommandCounters.CLEAR).increment();
           clearPhones();
         }
@@ -181,7 +175,7 @@ public class PhoneMovementGenerator extends BaseOperator
 
   /**
    * Sets the range of phone numbers for which the GPS locations need to be generated.
-   * 
+   *
    * @param i the range of phone numbers to set
    */
   public void setRange(int i)
@@ -190,7 +184,7 @@ public class PhoneMovementGenerator extends BaseOperator
   }
 
   /**
-   * @return the threshold 
+   * @return the threshold
    */
   @Min(0)
   public int getThreshold()
@@ -200,7 +194,7 @@ public class PhoneMovementGenerator extends BaseOperator
 
   /**
    * Sets the threshold that decides how frequently the GPS locations are updated.
-   * 
+   *
    * @param i the value that decides how frequently the GPS locations change.
    */
   public void setThreshold(int i)
@@ -217,8 +211,7 @@ public class PhoneMovementGenerator extends BaseOperator
     try {
       Integer phone = new Integer(phoneStr);
       registerSinglePhone(phone);
-    }
-    catch (NumberFormatException nfe) {
+    } catch (NumberFormatException nfe) {
       LOG.warn("Invalid no {}", phoneStr);
     }
   }
@@ -239,8 +232,7 @@ public class PhoneMovementGenerator extends BaseOperator
       for (int i = startPhone; i <= endPhone; i++) {
         registerSinglePhone(i);
       }
-    }
-    catch (NumberFormatException nfe) {
+    } catch (NumberFormatException nfe) {
       LOG.warn("Invalid phone range <{},{}>", startPhoneStr, endPhoneStr);
     }
   }
@@ -265,13 +257,13 @@ public class PhoneMovementGenerator extends BaseOperator
         LOG.debug("Removing query id {}", phone);
         emitPhoneRemoved(phone);
       }
-    }
-    catch (NumberFormatException nfe) {
+    } catch (NumberFormatException nfe) {
       LOG.warn("Invalid phone {}", phoneStr);
     }
   }
 
-  private void clearPhones() {
+  private void clearPhones()
+  {
     phoneRegister.clear();
     LOG.info("Clearing phones");
   }
@@ -298,8 +290,7 @@ public class PhoneMovementGenerator extends BaseOperator
       HighLow<Integer> loc = gps.get(e.getKey());
       if (loc == null) {
         gps.put(e.getKey(), e.getValue());
-      }
-      else {
+      } else {
         loc.setHigh(e.getValue().getHigh());
         loc.setLow(e.getValue().getLow());
       }
@@ -316,7 +307,8 @@ public class PhoneMovementGenerator extends BaseOperator
     context.setCounters(commandCounters);
   }
 
-  private void emitQueryResult(Integer phone) {
+  private void emitQueryResult(Integer phone)
+  {
     HighLow<Integer> loc = gps.get(phone);
     if (loc != null) {
       Map<String, String> queryResult = new HashMap<String, String>();
@@ -328,7 +320,7 @@ public class PhoneMovementGenerator extends BaseOperator
 
   private void emitPhoneRemoved(Integer phone)
   {
-    Map<String,String> removedResult= Maps.newHashMap();
+    Map<String,String> removedResult = Maps.newHashMap();
     removedResult.put(KEY_PHONE, String.valueOf(phone));
     removedResult.put(KEY_REMOVED,"true");
     locationQueryResult.emit(removedResult);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mobile/src/test/java/com/datatorrent/demos/mobile/ApplicationTest.java
----------------------------------------------------------------------
diff --git a/demos/mobile/src/test/java/com/datatorrent/demos/mobile/ApplicationTest.java b/demos/mobile/src/test/java/com/datatorrent/demos/mobile/ApplicationTest.java
index 72d6514..87e40bf 100644
--- a/demos/mobile/src/test/java/com/datatorrent/demos/mobile/ApplicationTest.java
+++ b/demos/mobile/src/test/java/com/datatorrent/demos/mobile/ApplicationTest.java
@@ -35,13 +35,13 @@ import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.conf.Configuration;
 
+import com.datatorrent.api.LocalMode;
+
 import com.datatorrent.lib.helper.SamplePubSubWebSocketServlet;
 import com.datatorrent.lib.io.PubSubWebSocketInputOperator;
 import com.datatorrent.lib.io.PubSubWebSocketOutputOperator;
 import com.datatorrent.lib.testbench.CollectorTestSink;
 
-import com.datatorrent.api.LocalMode;
-
 public class ApplicationTest
 {
   private static final Logger LOG = LoggerFactory.getLogger(ApplicationTest.class);
@@ -65,7 +65,7 @@ public class ApplicationTest
     contextHandler.addServlet(sh, "/pubsub");
     contextHandler.addServlet(sh, "/*");
     server.start();
-    Connector connector[] = server.getConnectors();
+    Connector[] connector = server.getConnectors();
     conf.set("dt.attr.GATEWAY_CONNECT_ADDRESS", "localhost:" + connector[0].getLocalPort());
     URI uri = URI.create("ws://localhost:" + connector[0].getLocalPort() + "/pubsub");
 
@@ -111,7 +111,7 @@ public class ApplicationTest
     server.stop();
     Assert.assertTrue("size of output is 5 ", sink.collectedTuples.size() == 5);
     for (Object obj : sink.collectedTuples) {
-      Assert.assertEquals("Expected phone number", "5559990", ((Map<String, String>) obj).get("phone"));
+      Assert.assertEquals("Expected phone number", "5559990", ((Map<String, String>)obj).get("phone"));
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/Application.java
----------------------------------------------------------------------
diff --git a/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/Application.java b/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/Application.java
index 245d9c4..5625439 100644
--- a/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/Application.java
+++ b/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/Application.java
@@ -20,20 +20,19 @@ package com.datatorrent.demos.mrmonitor;
 
 import org.apache.hadoop.conf.Configuration;
 
-import com.datatorrent.lib.io.ConsoleOutputOperator;
-import com.datatorrent.lib.testbench.SeedEventGenerator;
-
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.DAG.Locality;
 import com.datatorrent.api.StreamingApplication;
 import com.datatorrent.api.annotation.ApplicationAnnotation;
+import com.datatorrent.lib.io.ConsoleOutputOperator;
+import com.datatorrent.lib.testbench.SeedEventGenerator;
 
 /**
  * Application
  *
  * @since 2.0.0
  */
-@ApplicationAnnotation(name="MyFirstApplication")
+@ApplicationAnnotation(name = "MyFirstApplication")
 public class Application implements StreamingApplication
 {
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/Constants.java
----------------------------------------------------------------------
diff --git a/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/Constants.java b/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/Constants.java
index 2f3d651..7930405 100644
--- a/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/Constants.java
+++ b/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/Constants.java
@@ -26,23 +26,23 @@ package com.datatorrent.demos.mrmonitor;
 public interface Constants
 {
 
-  public final static int MAX_NUMBER_OF_JOBS = 25;
+  public static final int MAX_NUMBER_OF_JOBS = 25;
 
-  public final static String REDUCE_TASK_TYPE = "REDUCE";
-  public final static String MAP_TASK_TYPE = "MAP";
-  public final static String TASK_TYPE = "type";
-  public final static String TASK_ID = "id";
+  public static final String REDUCE_TASK_TYPE = "REDUCE";
+  public static final String MAP_TASK_TYPE = "MAP";
+  public static final String TASK_TYPE = "type";
+  public static final String TASK_ID = "id";
 
-  public final static String LEAGACY_TASK_ID = "taskId";
-  public final static int MAX_TASKS = 2000;
+  public static final String LEAGACY_TASK_ID = "taskId";
+  public static final int MAX_TASKS = 2000;
 
-  public final static String QUERY_APP_ID = "app_id";
-  public final static String QUERY_JOB_ID = "job_id";
-  public final static String QUERY_HADOOP_VERSION = "hadoop_version";
-  public final static String QUERY_API_VERSION = "api_version";
-  public final static String QUERY_RM_PORT = "rm_port";
-  public final static String QUERY_HS_PORT = "hs_port";
-  public final static String QUERY_HOST_NAME = "hostname";
+  public static final String QUERY_APP_ID = "app_id";
+  public static final String QUERY_JOB_ID = "job_id";
+  public static final String QUERY_HADOOP_VERSION = "hadoop_version";
+  public static final String QUERY_API_VERSION = "api_version";
+  public static final String QUERY_RM_PORT = "rm_port";
+  public static final String QUERY_HS_PORT = "hs_port";
+  public static final String QUERY_HOST_NAME = "hostname";
   public static final String QUERY_KEY_COMMAND = "command";
 
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MRJobStatusOperator.java
----------------------------------------------------------------------
diff --git a/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MRJobStatusOperator.java b/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MRJobStatusOperator.java
index 88863e2..263a1a7 100644
--- a/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MRJobStatusOperator.java
+++ b/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MRJobStatusOperator.java
@@ -18,7 +18,11 @@
  */
 package com.datatorrent.demos.mrmonitor;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 import java.util.Map.Entry;
 
 import org.codehaus.jettison.json.JSONArray;
@@ -107,8 +111,7 @@ public class MRJobStatusOperator implements Operator, IdleTimeHandler
           outputJsonObject.put("id", mrStatusObj.getJobId());
           outputJsonObject.put("removed", "true");
           output.emit(outputJsonObject.toString());
-        }
-        catch (JSONException e) {
+        } catch (JSONException e) {
           LOG.warn("Error creating JSON: {}", e.getMessage());
         }
         return;
@@ -123,8 +126,7 @@ public class MRJobStatusOperator implements Operator, IdleTimeHandler
       }
       if (mrStatusObj.getHadoopVersion() == 2) {
         getJsonForJob(mrStatusObj);
-      }
-      else if (mrStatusObj.getHadoopVersion() == 1) {
+      } else if (mrStatusObj.getHadoopVersion() == 1) {
         getJsonForLegacyJob(mrStatusObj);
       }
       mrStatusObj.setStatusHistoryCount(statusHistoryTime);
@@ -204,8 +206,7 @@ public class MRJobStatusOperator implements Operator, IdleTimeHandler
     if (jsonObj != null) {
       if (statusObj.getMetricObject() == null) {
         statusObj.setMetricObject(new TaskObject(jsonObj));
-      }
-      else if (!statusObj.getMetricObject().getJsonString().equalsIgnoreCase(jsonObj.toString())) {
+      } else if (!statusObj.getMetricObject().getJsonString().equalsIgnoreCase(jsonObj.toString())) {
         statusObj.getMetricObject().setJson(jsonObj);
         statusObj.getMetricObject().setModified(true);
       }
@@ -252,8 +253,7 @@ public class MRJobStatusOperator implements Operator, IdleTimeHandler
               continue;
             }
             reduceTaskOject.put(taskObj.getString(Constants.TASK_ID), new TaskObject(taskObj));
-          }
-          else {
+          } else {
             if (mapTaskOject.get(taskObj.getString(Constants.TASK_ID)) != null) {
               TaskObject tempTaskObj = mapTaskOject.get(taskObj.getString(Constants.TASK_ID));
               if (tempTaskObj.getJsonString().equals(taskObj.toString())) {
@@ -269,8 +269,7 @@ public class MRJobStatusOperator implements Operator, IdleTimeHandler
         }
         statusObj.setMapJsonObject(mapTaskOject);
         statusObj.setReduceJsonObject(reduceTaskOject);
-      }
-      catch (Exception e) {
+      } catch (Exception e) {
         LOG.info("exception: {}", e.getMessage());
       }
     }
@@ -324,12 +323,11 @@ public class MRJobStatusOperator implements Operator, IdleTimeHandler
   {
     try {
       JSONObject jobJson = statusObj.getJsonObject();
-      int totalTasks = ((JSONObject) ((JSONObject) jobJson.get(type + "TaskSummary")).get("taskStats")).getInt("numTotalTasks");
+      int totalTasks = ((JSONObject)((JSONObject)jobJson.get(type + "TaskSummary")).get("taskStats")).getInt("numTotalTasks");
       Map<String, TaskObject> taskMap;
       if (type.equalsIgnoreCase("map")) {
         taskMap = statusObj.getMapJsonObject();
-      }
-      else {
+      } else {
         taskMap = statusObj.getReduceJsonObject();
       }
 
@@ -371,12 +369,10 @@ public class MRJobStatusOperator implements Operator, IdleTimeHandler
 
       if (type.equalsIgnoreCase("map")) {
         statusObj.setMapJsonObject(taskMap);
-      }
-      else {
+      } else {
         statusObj.setReduceJsonObject(taskMap);
       }
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       LOG.info(e.getMessage());
     }
 
@@ -387,8 +383,7 @@ public class MRJobStatusOperator implements Operator, IdleTimeHandler
   {
     try {
       Thread.sleep(sleepTime);//
-    }
-    catch (InterruptedException ie) {
+    } catch (InterruptedException ie) {
       // If this thread was intrrupted by nother thread
     }
     if (!iterator.hasNext()) {
@@ -399,8 +394,7 @@ public class MRJobStatusOperator implements Operator, IdleTimeHandler
       MRStatusObject obj = iterator.next();
       if (obj.getHadoopVersion() == 2) {
         getJsonForJob(obj);
-      }
-      else if (obj.getHadoopVersion() == 1) {
+      } else if (obj.getHadoopVersion() == 1) {
         getJsonForLegacyJob(obj);
       }
     }
@@ -465,8 +459,7 @@ public class MRJobStatusOperator implements Operator, IdleTimeHandler
       outputJsonObject.put("tasks", arr);
       reduceOutput.emit(outputJsonObject.toString());
       obj.setRetrials(0);
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       LOG.warn("error creating json {}", e.getMessage());
     }
 
@@ -543,17 +536,14 @@ public class MRJobStatusOperator implements Operator, IdleTimeHandler
         if (!modified) {
           if (obj.getRetrials() >= maxRetrials) {
             delList.add(obj.getJobId());
-          }
-          else {
+          } else {
             obj.setRetrials(obj.getRetrials() + 1);
           }
-        }
-        else {
+        } else {
           obj.setRetrials(0);
         }
       }
-    }
-    catch (Exception ex) {
+    } catch (Exception ex) {
       LOG.warn("error creating json {}", ex.getMessage());
     }
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MRMonitoringApplication.java
----------------------------------------------------------------------
diff --git a/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MRMonitoringApplication.java b/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MRMonitoringApplication.java
index 5758ad1..037378a 100644
--- a/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MRMonitoringApplication.java
+++ b/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MRMonitoringApplication.java
@@ -20,10 +20,11 @@ package com.datatorrent.demos.mrmonitor;
 
 import java.net.URI;
 
-import org.apache.hadoop.conf.Configuration;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hadoop.conf.Configuration;
+
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.StreamingApplication;
 import com.datatorrent.api.annotation.ApplicationAnnotation;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MRStatusObject.java
----------------------------------------------------------------------
diff --git a/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MRStatusObject.java b/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MRStatusObject.java
index f0471f3..481f3dc 100644
--- a/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MRStatusObject.java
+++ b/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MRStatusObject.java
@@ -149,7 +149,8 @@ public class MRStatusObject
     virtualMemoryStatusHistory = new LinkedList<String>();
     cpuStatusHistory = new LinkedList<String>();
     statusScheduler = Executors.newScheduledThreadPool(1);
-    statusScheduler.scheduleAtFixedRate(new Runnable() {
+    statusScheduler.scheduleAtFixedRate(new Runnable()
+    {
       @Override
       public void run()
       {
@@ -333,12 +334,15 @@ public class MRStatusObject
   @Override
   public boolean equals(Object that)
   {
-    if (this == that)
+    if (this == that) {
       return true;
-    if (!(that instanceof MRStatusObject))
+    }
+    if (!(that instanceof MRStatusObject)) {
       return false;
-    if (this.hashCode() == that.hashCode())
+    }
+    if (this.hashCode() == that.hashCode()) {
       return true;
+    }
     return false;
   }
 
@@ -443,7 +447,7 @@ public class MRStatusObject
 
     /**
      * This returns the task information as json
-     * 
+     *
      * @return
      */
     public JSONObject getJson()
@@ -453,7 +457,7 @@ public class MRStatusObject
 
     /**
      * This stores the task information as json
-     * 
+     *
      * @param json
      */
     public void setJson(JSONObject json)
@@ -463,7 +467,7 @@ public class MRStatusObject
 
     /**
      * This returns if the json object has been modified
-     * 
+     *
      * @return
      */
     public boolean isModified()
@@ -473,7 +477,7 @@ public class MRStatusObject
 
     /**
      * This sets if the json object is modified
-     * 
+     *
      * @param modified
      */
     public void setModified(boolean modified)
@@ -483,7 +487,7 @@ public class MRStatusObject
 
     /**
      * This returns the string format of the json object
-     * 
+     *
      * @return
      */
     public String getJsonString()

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MRUtil.java
----------------------------------------------------------------------
diff --git a/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MRUtil.java b/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MRUtil.java
index cb10347..0d7f6af 100644
--- a/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MRUtil.java
+++ b/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MRUtil.java
@@ -20,15 +20,16 @@ package com.datatorrent.demos.mrmonitor;
 
 import java.io.IOException;
 
+import org.codehaus.jettison.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import org.apache.http.client.ClientProtocolException;
 import org.apache.http.client.HttpClient;
 import org.apache.http.client.ResponseHandler;
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.impl.client.BasicResponseHandler;
 import org.apache.http.impl.client.DefaultHttpClient;
-import org.codehaus.jettison.json.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * <p>

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MapToMRObjectOperator.java
----------------------------------------------------------------------
diff --git a/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MapToMRObjectOperator.java b/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MapToMRObjectOperator.java
index e37454f..5075163 100644
--- a/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MapToMRObjectOperator.java
+++ b/demos/mrmonitor/src/main/java/com/datatorrent/demos/mrmonitor/MapToMRObjectOperator.java
@@ -33,7 +33,8 @@ import com.datatorrent.api.Operator;
 public class MapToMRObjectOperator implements Operator
 {
 
-  public final transient DefaultInputPort<Map<String, String>> input = new DefaultInputPort<Map<String, String>>() {
+  public final transient DefaultInputPort<Map<String, String>> input = new DefaultInputPort<Map<String, String>>()
+  {
     @Override
     public void process(Map<String, String> tuple)
     {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mrmonitor/src/test/java/com/datatorrent/demos/mrmonitor/MrMonitoringApplicationTest.java
----------------------------------------------------------------------
diff --git a/demos/mrmonitor/src/test/java/com/datatorrent/demos/mrmonitor/MrMonitoringApplicationTest.java b/demos/mrmonitor/src/test/java/com/datatorrent/demos/mrmonitor/MrMonitoringApplicationTest.java
index 70cf840..ad8de02 100644
--- a/demos/mrmonitor/src/test/java/com/datatorrent/demos/mrmonitor/MrMonitoringApplicationTest.java
+++ b/demos/mrmonitor/src/test/java/com/datatorrent/demos/mrmonitor/MrMonitoringApplicationTest.java
@@ -28,9 +28,8 @@ import org.junit.Test;
 
 import org.apache.hadoop.conf.Configuration;
 
-import com.datatorrent.lib.helper.SamplePubSubWebSocketServlet;
-
 import com.datatorrent.api.LocalMode;
+import com.datatorrent.lib.helper.SamplePubSubWebSocketServlet;
 
 /**
  * <p>MapReduceDebuggerApplicationTest class.</p>
@@ -53,7 +52,7 @@ public class MrMonitoringApplicationTest
     contextHandler.addServlet(sh, "/pubsub");
     contextHandler.addServlet(sh, "/*");
     server.start();
-    Connector connector[] = server.getConnectors();
+    Connector[] connector = server.getConnectors();
     conf.set("dt.attr.GATEWAY_CONNECT_ADDRESS", "localhost:" + connector[0].getLocalPort());
 
     MRMonitoringApplication application = new MRMonitoringApplication();

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/DateWritable.java
----------------------------------------------------------------------
diff --git a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/DateWritable.java b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/DateWritable.java
index 8ecf76e..5dbd83f 100644
--- a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/DateWritable.java
+++ b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/DateWritable.java
@@ -18,11 +18,11 @@
  */
 package com.datatorrent.demos.mroperator;
 
-import java.text.SimpleDateFormat;
-import java.util.Date;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
 
 import org.apache.hadoop.io.WritableComparable;
 
@@ -33,44 +33,48 @@ import org.apache.hadoop.io.WritableComparable;
  */
 public class DateWritable implements WritableComparable<DateWritable>
 {
-	private final static SimpleDateFormat formatter = new SimpleDateFormat( "yyyy-MM-dd' T 'HH:mm:ss.SSS" );
-	private Date date;
+  private static final SimpleDateFormat formatter = new SimpleDateFormat( "yyyy-MM-dd' T 'HH:mm:ss.SSS" );
+  private Date date;
+
+  public Date getDate()
+  {
+    return date;
+  }
+
+  public void setDate( Date date )
+  {
+    this.date = date;
+  }
 
-	public Date getDate()
-	{
-		return date;
-	}
+  public void readFields( DataInput in ) throws IOException
+  {
+    date = new Date( in.readLong() );
+  }
 
-	public void setDate( Date date )
-	{
-		this.date = date;
-	}
+  public void write( DataOutput out ) throws IOException
+  {
+    out.writeLong( date.getTime() );
+  }
 
-	public void readFields( DataInput in ) throws IOException
-	{
-		date = new Date( in.readLong() );
-	}
+  @Override
+  public boolean equals(Object o)
+  {
+    return toString().equals(o.toString());
+  }
 
-	public void write( DataOutput out ) throws IOException
-	{
-		out.writeLong( date.getTime() );
-	}
+  @Override
+  public int hashCode()
+  {
+    return toString().hashCode();
+  }
 
-	@Override
-	public boolean equals(Object o){
-		return toString().equals(o.toString());
-	}
-	@Override
-	public int hashCode(){
-		return toString().hashCode();
-	}
-	public String toString()
-	{
-		return formatter.format( date);
-	}
+  public String toString()
+  {
+    return formatter.format( date);
+  }
 
-    public int compareTo( DateWritable other )
-    {
-        return date.compareTo( other.getDate() );
-    }
+  public int compareTo( DateWritable other )
+  {
+    return date.compareTo( other.getDate() );
+  }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/HdfsKeyValOutputOperator.java
----------------------------------------------------------------------
diff --git a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/HdfsKeyValOutputOperator.java b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/HdfsKeyValOutputOperator.java
index b6b9735..c4b9c49 100644
--- a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/HdfsKeyValOutputOperator.java
+++ b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/HdfsKeyValOutputOperator.java
@@ -36,6 +36,6 @@ public class HdfsKeyValOutputOperator<K, V> extends AbstractSingleFileOutputOper
   @Override
   public byte[] getBytesForTuple(KeyHashValPair<K,V> t)
   {
-    return (t.toString()+"\n").getBytes();
+    return (t.toString() + "\n").getBytes();
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/InvertedIndexApplication.java
----------------------------------------------------------------------
diff --git a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/InvertedIndexApplication.java b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/InvertedIndexApplication.java
index dae07a2..076b8ac 100644
--- a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/InvertedIndexApplication.java
+++ b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/InvertedIndexApplication.java
@@ -29,7 +29,7 @@ import com.datatorrent.api.annotation.ApplicationAnnotation;
  *
  * @since 0.9.0
  */
-@ApplicationAnnotation(name="InvertedIndexDemo")
+@ApplicationAnnotation(name = "InvertedIndexDemo")
 public class InvertedIndexApplication extends MapReduceApplication<LongWritable, Text, Text, Text>
 {
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/LineIndexer.java
----------------------------------------------------------------------
diff --git a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/LineIndexer.java b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/LineIndexer.java
index aabea81..e963954 100644
--- a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/LineIndexer.java
+++ b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/LineIndexer.java
@@ -41,18 +41,18 @@ import org.apache.hadoop.mapred.Reporter;
  *
  * @since 0.9.0
  */
-public class LineIndexer {
+public class LineIndexer
+{
 
   public static class LineIndexMapper extends MapReduceBase
-      implements Mapper<LongWritable, Text, Text, Text> {
-
-    private final static Text word = new Text();
-    private final static Text location = new Text();
+      implements Mapper<LongWritable, Text, Text, Text>
+  {
+    private static final Text word = new Text();
+    private static final Text location = new Text();
 
     public void map(LongWritable key, Text val,
-        OutputCollector<Text, Text> output, Reporter reporter)
-        throws IOException {
-
+        OutputCollector<Text, Text> output, Reporter reporter) throws IOException
+    {
       FileSplit fileSplit = (FileSplit)reporter.getInputSplit();
       String fileName = fileSplit.getPath().getName();
       location.set(fileName);
@@ -69,18 +69,18 @@ public class LineIndexer {
 
 
   public static class LineIndexReducer extends MapReduceBase
-      implements Reducer<Text, Text, Text, Text> {
-
+      implements Reducer<Text, Text, Text, Text>
+  {
     public void reduce(Text key, Iterator<Text> values,
-        OutputCollector<Text, Text> output, Reporter reporter)
-        throws IOException {
-
+        OutputCollector<Text, Text> output, Reporter reporter) throws IOException
+    {
       boolean first = true;
       StringBuilder toReturn = new StringBuilder();
-      while (values.hasNext()){
-        if (!first)
+      while (values.hasNext()) {
+        if (!first) {
           toReturn.append(", ");
-        first=false;
+        }
+        first = false;
         toReturn.append(values.next().toString());
       }
 
@@ -93,7 +93,8 @@ public class LineIndexer {
    * The actual main() method for our program; this is the
    * "driver" for the MapReduce job.
    */
-  public static void main(String[] args) {
+  public static void main(String[] args)
+  {
     JobClient client = new JobClient();
     JobConf conf = new JobConf(LineIndexer.class);
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/LogCountsPerHour.java
----------------------------------------------------------------------
diff --git a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/LogCountsPerHour.java b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/LogCountsPerHour.java
index 793ad4d..69ee892 100644
--- a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/LogCountsPerHour.java
+++ b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/LogCountsPerHour.java
@@ -18,201 +18,170 @@
  */
 package com.datatorrent.demos.mroperator;
 
-import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
+import java.io.IOException;
+import java.util.Calendar;
+import java.util.Iterator;
+
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.mapred.Mapper;
-import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.TextOutputFormat;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.FileInputFormat;
-import org.apache.hadoop.mapred.FileOutputFormat;
-
-import java.io.IOException;
-import java.util.Calendar;
-import java.util.Iterator;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 
 /**
  * <p>LogCountsPerHour class.</p>
  *
  * @since 0.9.0
  */
-public class LogCountsPerHour extends Configured implements Tool {
+public class LogCountsPerHour extends Configured implements Tool
+{
 
-    public static class LogMapClass extends MapReduceBase
-            implements Mapper<LongWritable, Text, DateWritable, IntWritable>
-    {
-        private DateWritable date = new DateWritable();
-        private final static IntWritable one = new IntWritable( 1 );
-
-        public void map( LongWritable key, // Offset into the file
-                         Text value,
-                         OutputCollector<DateWritable, IntWritable> output,
-                         Reporter reporter) throws IOException
-        {
-            // Get the value as a String; it is of the format:
-        	// 111.111.111.111 - - [16/Dec/2012:05:32:50 -0500] "GET / HTTP/1.1" 200 14791 "-" "Mozilla/5.0 (compatible; Baiduspider/2.0; +http://www.baidu.com/search/spider.html)"
-            String text = value.toString();
-
-            // Get the date and time
-            int openBracket = text.indexOf( '[' );
-            int closeBracket = text.indexOf( ']' );
-            if( openBracket != -1 && closeBracket != -1 )
-            {
-            	// Read the date
-            	String dateString = text.substring( text.indexOf( '[' ) + 1, text.indexOf( ']' ) );
-
-            	// Build a date object from a string of the form: 16/Dec/2012:05:32:50 -0500
-                int index = 0;
-                int nextIndex = dateString.indexOf( '/' );
-                int day = Integer.parseInt( dateString.substring(index, nextIndex) );
-
-                index = nextIndex;
-                nextIndex = dateString.indexOf( '/', index+1 );
-                String month = dateString.substring( index+1, nextIndex );
-
-                index = nextIndex;
-                nextIndex = dateString.indexOf( ':', index );
-                int year = Integer.parseInt(dateString.substring(index + 1, nextIndex));
-
-                index = nextIndex;
-                nextIndex = dateString.indexOf( ':', index+1 );
-                int hour = Integer.parseInt(dateString.substring(index + 1, nextIndex));
-
-                // Build a calendar object for this date
-                Calendar calendar = Calendar.getInstance();
-                calendar.set( Calendar.DATE, day );
-                calendar.set( Calendar.YEAR, year );
-                calendar.set( Calendar.HOUR, hour );
-                calendar.set( Calendar.MINUTE, 0 );
-                calendar.set( Calendar.SECOND, 0 );
-                calendar.set( Calendar.MILLISECOND, 0 );
-
-                if( month.equalsIgnoreCase( "dec" ) )
-                {
-                    calendar.set( Calendar.MONTH, Calendar.DECEMBER );
-                }
-                else if( month.equalsIgnoreCase( "nov" ) )
-                {
-                    calendar.set( Calendar.MONTH, Calendar.NOVEMBER );
-                }
-                else if( month.equalsIgnoreCase( "oct" ) )
-                {
-                    calendar.set( Calendar.MONTH, Calendar.OCTOBER );
-                }
-                else if( month.equalsIgnoreCase( "sep" ) )
-                {
-                    calendar.set( Calendar.MONTH, Calendar.SEPTEMBER );
-                }
-                else if( month.equalsIgnoreCase( "aug" ) )
-                {
-                    calendar.set( Calendar.MONTH, Calendar.AUGUST );
-                }
-                else if( month.equalsIgnoreCase( "jul" ) )
-                {
-                    calendar.set( Calendar.MONTH, Calendar.JULY );
-                }
-                else if( month.equalsIgnoreCase( "jun" ) )
-                {
-                    calendar.set( Calendar.MONTH, Calendar.JUNE );
-                }
-                else if( month.equalsIgnoreCase( "may" ) )
-                {
-                    calendar.set( Calendar.MONTH, Calendar.MAY );
-                }
-                else if( month.equalsIgnoreCase( "apr" ) )
-                {
-                    calendar.set( Calendar.MONTH, Calendar.APRIL );
-                }
-                else if( month.equalsIgnoreCase( "mar" ) )
-                {
-                    calendar.set( Calendar.MONTH, Calendar.MARCH );
-                }
-                else if( month.equalsIgnoreCase( "feb" ) )
-                {
-                    calendar.set( Calendar.MONTH, Calendar.FEBRUARY );
-                }
-                else if( month.equalsIgnoreCase( "jan" ) )
-                {
-                    calendar.set( Calendar.MONTH, Calendar.JANUARY );
-                }
-
-
-                // Output the date as the key and 1 as the value
-                date.setDate( calendar.getTime() );
-                output.collect(date, one);
-            }
-        }
-    }
+  public static class LogMapClass extends MapReduceBase
+      implements Mapper<LongWritable, Text, DateWritable, IntWritable>
+  {
+    private DateWritable date = new DateWritable();
+    private static final IntWritable one = new IntWritable(1);
 
-    public static class LogReduce extends MapReduceBase
-            implements Reducer<DateWritable, IntWritable, DateWritable, IntWritable>
+    public void map(LongWritable key, Text value, OutputCollector<DateWritable, IntWritable> output, Reporter reporter) throws IOException
     {
-        public void reduce( DateWritable key, Iterator<IntWritable> values,
-                            OutputCollector<DateWritable, IntWritable> output,
-                            Reporter reporter) throws IOException
-        {
-            // Iterate over all of the values (counts of occurrences of this word)
-            int count = 0;
-            while( values.hasNext() )
-            {
-                // Add the value to our count
-                count += values.next().get();
-            }
-
-            // Output the word with its count (wrapped in an IntWritable)
-            output.collect( key, new IntWritable( count ) );
+      // Get the value as a String; it is of the format:
+      // 111.111.111.111 - - [16/Dec/2012:05:32:50 -0500] "GET / HTTP/1.1" 200 14791 "-" "Mozilla/5.0 (compatible; Baiduspider/2.0; +http://www.baidu.com/search/spider.html)"
+      String text = value.toString();
+
+      // Get the date and time
+      int openBracket = text.indexOf('[');
+      int closeBracket = text.indexOf(']');
+      if (openBracket != -1 && closeBracket != -1) {
+        // Read the date
+        String dateString = text.substring(text.indexOf('[') + 1, text.indexOf(']'));
+
+        // Build a date object from a string of the form: 16/Dec/2012:05:32:50 -0500
+        int index = 0;
+        int nextIndex = dateString.indexOf('/');
+        int day = Integer.parseInt(dateString.substring(index, nextIndex));
+
+        index = nextIndex;
+        nextIndex = dateString.indexOf('/', index + 1);
+        String month = dateString.substring(index + 1, nextIndex);
+
+        index = nextIndex;
+        nextIndex = dateString.indexOf(':', index);
+        int year = Integer.parseInt(dateString.substring(index + 1, nextIndex));
+
+        index = nextIndex;
+        nextIndex = dateString.indexOf(':', index + 1);
+        int hour = Integer.parseInt(dateString.substring(index + 1, nextIndex));
+
+        // Build a calendar object for this date
+        Calendar calendar = Calendar.getInstance();
+        calendar.set(Calendar.DATE, day);
+        calendar.set(Calendar.YEAR, year);
+        calendar.set(Calendar.HOUR, hour);
+        calendar.set(Calendar.MINUTE, 0);
+        calendar.set(Calendar.SECOND, 0);
+        calendar.set(Calendar.MILLISECOND, 0);
+
+        if (month.equalsIgnoreCase("dec")) {
+          calendar.set(Calendar.MONTH, Calendar.DECEMBER);
+        } else if (month.equalsIgnoreCase("nov")) {
+          calendar.set(Calendar.MONTH, Calendar.NOVEMBER);
+        } else if (month.equalsIgnoreCase("oct")) {
+          calendar.set(Calendar.MONTH, Calendar.OCTOBER);
+        } else if (month.equalsIgnoreCase("sep")) {
+          calendar.set(Calendar.MONTH, Calendar.SEPTEMBER);
+        } else if (month.equalsIgnoreCase("aug")) {
+          calendar.set(Calendar.MONTH, Calendar.AUGUST);
+        } else if (month.equalsIgnoreCase("jul")) {
+          calendar.set(Calendar.MONTH, Calendar.JULY);
+        } else if (month.equalsIgnoreCase("jun")) {
+          calendar.set(Calendar.MONTH, Calendar.JUNE);
+        } else if (month.equalsIgnoreCase("may")) {
+          calendar.set(Calendar.MONTH, Calendar.MAY);
+        } else if (month.equalsIgnoreCase("apr")) {
+          calendar.set(Calendar.MONTH, Calendar.APRIL);
+        } else if (month.equalsIgnoreCase("mar")) {
+          calendar.set(Calendar.MONTH, Calendar.MARCH);
+        } else if (month.equalsIgnoreCase("feb")) {
+          calendar.set(Calendar.MONTH, Calendar.FEBRUARY);
+        } else if (month.equalsIgnoreCase("jan")) {
+          calendar.set(Calendar.MONTH, Calendar.JANUARY);
         }
-    }
 
 
-    public int run(String[] args) throws Exception
-    {
-        // Create a configuration
-        Configuration conf = getConf();
-
-        // Create a job from the default configuration that will use the WordCount class
-        JobConf job = new JobConf( conf, LogCountsPerHour.class );
-
-        // Define our input path as the first command line argument and our output path as the second
-        Path in = new Path( args[0] );
-        Path out = new Path( args[1] );
-
-        // Create File Input/Output formats for these paths (in the job)
-        FileInputFormat.setInputPaths( job, in );
-        FileOutputFormat.setOutputPath( job, out );
-
-        // Configure the job: name, mapper, reducer, and combiner
-        job.setJobName( "LogAveragePerHour" );
-        job.setMapperClass( LogMapClass.class );
-        job.setReducerClass( LogReduce.class );
-        job.setCombinerClass( LogReduce.class );
-
-        // Configure the output
-        job.setOutputFormat( TextOutputFormat.class );
-        job.setOutputKeyClass( DateWritable.class );
-        job.setOutputValueClass( IntWritable.class );
-
-        // Run the job
-        JobClient.runJob(job);
-        return 0;
+        // Output the date as the key and 1 as the value
+        date.setDate(calendar.getTime());
+        output.collect(date, one);
+      }
     }
+  }
 
-    public static void main(String[] args) throws Exception
+  public static class LogReduce extends MapReduceBase
+      implements Reducer<DateWritable, IntWritable, DateWritable, IntWritable>
+  {
+    public void reduce(DateWritable key, Iterator<IntWritable> values, OutputCollector<DateWritable, IntWritable> output, Reporter reporter) throws IOException
     {
-        // Start the LogCountsPerHour MapReduce application
-        int res = ToolRunner.run( new Configuration(),
-                new LogCountsPerHour(),
-                args );
-        System.exit( res );
+      // Iterate over all of the values (counts of log entries for this hour)
+      int count = 0;
+      while (values.hasNext()) {
+        // Add the value to our count
+        count += values.next().get();
+      }
+
+      // Output the date key with its count (wrapped in an IntWritable)
+      output.collect(key, new IntWritable(count));
     }
+  }
+
+
+  public int run(String[] args) throws Exception
+  {
+    // Create a configuration
+    Configuration conf = getConf();
+
+    // Create a job from the default configuration that will use the LogCountsPerHour class
+    JobConf job = new JobConf(conf, LogCountsPerHour.class);
+
+    // Define our input path as the first command line argument and our output path as the second
+    Path in = new Path(args[0]);
+    Path out = new Path(args[1]);
+
+    // Create File Input/Output formats for these paths (in the job)
+    FileInputFormat.setInputPaths(job, in);
+    FileOutputFormat.setOutputPath(job, out);
+
+    // Configure the job: name, mapper, reducer, and combiner
+    job.setJobName("LogAveragePerHour");
+    job.setMapperClass(LogMapClass.class);
+    job.setReducerClass(LogReduce.class);
+    job.setCombinerClass(LogReduce.class);
+
+    // Configure the output
+    job.setOutputFormat(TextOutputFormat.class);
+    job.setOutputKeyClass(DateWritable.class);
+    job.setOutputValueClass(IntWritable.class);
+
+    // Run the job
+    JobClient.runJob(job);
+    return 0;
+  }
+
+  public static void main(String[] args) throws Exception
+  {
+    // Start the LogCountsPerHour MapReduce application
+    int res = ToolRunner.run(new Configuration(), new LogCountsPerHour(), args);
+    System.exit(res);
+  }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/LogsCountApplication.java
----------------------------------------------------------------------
diff --git a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/LogsCountApplication.java b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/LogsCountApplication.java
index cbe5566..2d647ed 100644
--- a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/LogsCountApplication.java
+++ b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/LogsCountApplication.java
@@ -30,7 +30,7 @@ import com.datatorrent.api.annotation.ApplicationAnnotation;
  *
  * @since 0.9.0
  */
-@ApplicationAnnotation(name="LogsCountDemo")
+@ApplicationAnnotation(name = "LogsCountDemo")
 public class LogsCountApplication extends MapReduceApplication<LongWritable, Text, DateWritable, IntWritable>
 {
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/MapOperator.java
----------------------------------------------------------------------
diff --git a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/MapOperator.java b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/MapOperator.java
index b8023f5..509f6ae 100644
--- a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/MapOperator.java
+++ b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/MapOperator.java
@@ -22,18 +22,35 @@ import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 
 import javax.validation.constraints.Min;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.serializer.Deserializer;
 import org.apache.hadoop.io.serializer.SerializationFactory;
 import org.apache.hadoop.io.serializer.Serializer;
-import org.apache.hadoop.mapred.*;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.hadoop.mapred.Counters;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.KeyValueTextInputFormat;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.TextInputFormat;
 
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultOutputPort;
@@ -123,8 +140,7 @@ public class MapOperator<K1, V1, K2, V2>  implements InputOperator, Partitioner<
     if (reader == null) {
       try {
         reader = inputFormat.getRecordReader(inputSplit, new JobConf(new Configuration()), reporter);
-      }
-      catch (IOException e) {
+      } catch (IOException e) {
         logger.info("error getting record reader {}", e.getMessage());
       }
     }
@@ -150,11 +166,10 @@ public class MapOperator<K1, V1, K2, V2>  implements InputOperator, Partitioner<
       SerializationFactory serializationFactory = new SerializationFactory(conf);
       Deserializer keyDesiralizer = serializationFactory.getDeserializer(inputSplitClass);
       keyDesiralizer.open(new ByteArrayInputStream(outstream.toByteArray()));
-      inputSplit = (InputSplit) keyDesiralizer.deserialize(null);
-      ((ReporterImpl) reporter).setInputSplit(inputSplit);
+      inputSplit = (InputSplit)keyDesiralizer.deserialize(null);
+      ((ReporterImpl)reporter).setInputSplit(inputSplit);
       reader = inputFormat.getRecordReader(inputSplit, new JobConf(conf), reporter);
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       logger.info("failed to initialize inputformat obj {}", inputFormat);
       throw new RuntimeException(e);
     }
@@ -172,8 +187,7 @@ public class MapOperator<K1, V1, K2, V2>  implements InputOperator, Partitioner<
     if (mapClass != null) {
       try {
         mapObject = mapClass.newInstance();
-      }
-      catch (Exception e) {
+      } catch (Exception e) {
         logger.info("can't instantiate object {}", e.getMessage());
       }
 
@@ -182,8 +196,7 @@ public class MapOperator<K1, V1, K2, V2>  implements InputOperator, Partitioner<
     if (combineClass != null) {
       try {
         combineObject = combineClass.newInstance();
-      }
-      catch (Exception e) {
+      } catch (Exception e) {
         logger.info("can't instantiate object {}", e.getMessage());
       }
       combineObject.configure(jobConf);
@@ -202,15 +215,14 @@ public class MapOperator<K1, V1, K2, V2>  implements InputOperator, Partitioner<
           KeyHashValPair<K1, V1> keyValue = new KeyHashValPair<K1, V1>(key, val);
           mapObject.map(keyValue.getKey(), keyValue.getValue(), outputCollector, reporter);
           if (combineObject == null) {
-            List<KeyHashValPair<K2, V2>> list = ((OutputCollectorImpl<K2, V2>) outputCollector).getList();
+            List<KeyHashValPair<K2, V2>> list = ((OutputCollectorImpl<K2, V2>)outputCollector).getList();
             for (KeyHashValPair<K2, V2> e : list) {
               output.emit(e);
             }
             list.clear();
           }
         }
-      }
-      catch (IOException ex) {
+      } catch (IOException ex) {
         logger.debug(ex.toString());
         throw new RuntimeException(ex);
       }
@@ -220,7 +232,7 @@ public class MapOperator<K1, V1, K2, V2>  implements InputOperator, Partitioner<
   @Override
   public void endWindow()
   {
-    List<KeyHashValPair<K2, V2>> list = ((OutputCollectorImpl<K2, V2>) outputCollector).getList();
+    List<KeyHashValPair<K2, V2>> list = ((OutputCollectorImpl<K2, V2>)outputCollector).getList();
     if (combineObject != null) {
       Map<K2, List<V2>> cacheObject = new HashMap<K2, List<V2>>();
       for (KeyHashValPair<K2, V2> tuple : list) {
@@ -229,8 +241,7 @@ public class MapOperator<K1, V1, K2, V2>  implements InputOperator, Partitioner<
           cacheList = new ArrayList<V2>();
           cacheList.add(tuple.getValue());
           cacheObject.put(tuple.getKey(), cacheList);
-        }
-        else {
+        } else {
           cacheList.add(tuple.getValue());
         }
       }
@@ -239,12 +250,11 @@ public class MapOperator<K1, V1, K2, V2>  implements InputOperator, Partitioner<
       for (Map.Entry<K2, List<V2>> e : cacheObject.entrySet()) {
         try {
           combineObject.reduce(e.getKey(), e.getValue().iterator(), tempOutputCollector, reporter);
-        }
-        catch (IOException e1) {
+        } catch (IOException e1) {
           logger.info(e1.getMessage());
         }
       }
-      list = ((OutputCollectorImpl<K2, V2>) tempOutputCollector).getList();
+      list = ((OutputCollectorImpl<K2, V2>)tempOutputCollector).getList();
       for (KeyHashValPair<K2, V2> e : list) {
         output.emit(e);
       }
@@ -261,14 +271,13 @@ public class MapOperator<K1, V1, K2, V2>  implements InputOperator, Partitioner<
   {
     FileInputFormat.setInputPaths(conf, new Path(path));
     if (inputFormat == null) {
-        inputFormat = inputFormatClass.newInstance();
-        String inputFormatClassName = inputFormatClass.getName();
-        if (inputFormatClassName.equals("org.apache.hadoop.mapred.TextInputFormat")) {
-          ((TextInputFormat) inputFormat).configure(conf);
-        }
-        else if (inputFormatClassName.equals("org.apache.hadoop.mapred.KeyValueTextInputFormat")) {
-          ((KeyValueTextInputFormat) inputFormat).configure(conf);
-        }
+      inputFormat = inputFormatClass.newInstance();
+      String inputFormatClassName = inputFormatClass.getName();
+      if (inputFormatClassName.equals("org.apache.hadoop.mapred.TextInputFormat")) {
+        ((TextInputFormat)inputFormat).configure(conf);
+      } else if (inputFormatClassName.equals("org.apache.hadoop.mapred.KeyValueTextInputFormat")) {
+        ((KeyValueTextInputFormat)inputFormat).configure(conf);
+      }
     }
     return inputFormat.getSplits(conf, numSplits);
     // return null;
@@ -296,8 +305,7 @@ public class MapOperator<K1, V1, K2, V2>  implements InputOperator, Partitioner<
       InputSplit[] splits;
       try {
         splits = getSplits(new JobConf(conf), tempPartitionCount, template.getPartitionedInstance().getDirName());
-      }
-      catch (Exception e1) {
+      } catch (Exception e1) {
         logger.info(" can't get splits {}", e1.getMessage());
         throw new RuntimeException(e1);
       }
@@ -316,8 +324,7 @@ public class MapOperator<K1, V1, K2, V2>  implements InputOperator, Partitioner<
           keySerializer.open(opr.getOutstream());
           keySerializer.serialize(splits[size - 1]);
           opr.setInputSplitClass(splits[size - 1].getClass());
-        }
-        catch (IOException e) {
+        } catch (IOException e) {
           logger.info("error while serializing {}", e.getMessage());
         }
         size--;
@@ -333,8 +340,7 @@ public class MapOperator<K1, V1, K2, V2>  implements InputOperator, Partitioner<
           keySerializer.open(opr.getOutstream());
           keySerializer.serialize(splits[size - 1]);
           opr.setInputSplitClass(splits[size - 1].getClass());
-        }
-        catch (IOException e) {
+        } catch (IOException e) {
           logger.info("error while serializing {}", e.getMessage());
         }
         size--;
@@ -342,8 +348,7 @@ public class MapOperator<K1, V1, K2, V2>  implements InputOperator, Partitioner<
       }
       try {
         keySerializer.close();
-      }
-      catch (IOException e) {
+      } catch (IOException e) {
         throw new RuntimeException(e);
       }
       return operList;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/NewWordCountApplication.java
----------------------------------------------------------------------
diff --git a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/NewWordCountApplication.java b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/NewWordCountApplication.java
index 45f9005..b0ea7d8 100644
--- a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/NewWordCountApplication.java
+++ b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/NewWordCountApplication.java
@@ -30,15 +30,15 @@ import com.datatorrent.api.annotation.ApplicationAnnotation;
  *
  * @since 0.9.0
  */
-@ApplicationAnnotation(name="WordCountDemo")
-public class NewWordCountApplication extends MapReduceApplication<LongWritable, Text, Text, IntWritable> {
-
-	public void NewWordCountApplication() {
-		setMapClass(WordCount.Map.class);
-		setReduceClass(WordCount.Reduce.class);
-		setCombineClass(WordCount.Reduce.class);
-		setInputFormat(TextInputFormat.class);
-
-	}
+@ApplicationAnnotation(name = "WordCountDemo")
+public class NewWordCountApplication extends MapReduceApplication<LongWritable, Text, Text, IntWritable>
+{
 
+  public void NewWordCountApplication()
+  {
+    setMapClass(WordCount.Map.class);
+    setReduceClass(WordCount.Reduce.class);
+    setCombineClass(WordCount.Reduce.class);
+    setInputFormat(TextInputFormat.class);
+  }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/OutputCollectorImpl.java
----------------------------------------------------------------------
diff --git a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/OutputCollectorImpl.java b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/OutputCollectorImpl.java
index b380553..6c81724 100644
--- a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/OutputCollectorImpl.java
+++ b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/OutputCollectorImpl.java
@@ -24,14 +24,14 @@ import java.io.PipedOutputStream;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.serializer.Deserializer;
 import org.apache.hadoop.io.serializer.SerializationFactory;
 import org.apache.hadoop.io.serializer.Serializer;
 import org.apache.hadoop.mapred.OutputCollector;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import com.datatorrent.lib.util.KeyHashValPair;
 
 /**
@@ -40,50 +40,55 @@ import com.datatorrent.lib.util.KeyHashValPair;
  * @since 0.9.0
  */
 @SuppressWarnings("unchecked")
-public class OutputCollectorImpl<K extends Object, V extends Object> implements OutputCollector<K, V> {
-	private static final Logger logger = LoggerFactory.getLogger(OutputCollectorImpl.class);
+public class OutputCollectorImpl<K extends Object, V extends Object> implements OutputCollector<K, V>
+{
+  private static final Logger logger = LoggerFactory.getLogger(OutputCollectorImpl.class);
 
-	private List<KeyHashValPair<K, V>> list = new ArrayList<KeyHashValPair<K, V>>();
+  private List<KeyHashValPair<K, V>> list = new ArrayList<KeyHashValPair<K, V>>();
 
-	public List<KeyHashValPair<K, V>> getList() {
-		return list;
-	}
+  public List<KeyHashValPair<K, V>> getList()
+  {
+    return list;
+  }
 
-	private transient SerializationFactory serializationFactory;
-	private transient Configuration conf = null;
+  private transient SerializationFactory serializationFactory;
+  private transient Configuration conf = null;
 
-	public OutputCollectorImpl() {
-		conf = new Configuration();
-		serializationFactory = new SerializationFactory(conf);
+  public OutputCollectorImpl()
+  {
+    conf = new Configuration();
+    serializationFactory = new SerializationFactory(conf);
 
-	}
+  }
 
-	private <T> T cloneObj(T t) throws IOException {
-		Serializer<T> keySerializer;
-		Class<T> keyClass;
-		PipedInputStream pis = new PipedInputStream();
-		PipedOutputStream pos = new PipedOutputStream(pis);
-		keyClass = (Class<T>) t.getClass();
-		keySerializer = serializationFactory.getSerializer(keyClass);
-		keySerializer.open(pos);
-		keySerializer.serialize(t);
-		Deserializer<T> keyDesiralizer = serializationFactory.getDeserializer(keyClass);
-		keyDesiralizer.open(pis);
-		T clonedArg0 = keyDesiralizer.deserialize(null);
-		pos.close();
-		pis.close();
-		keySerializer.close();
-		keyDesiralizer.close();
-		return clonedArg0;
+  private <T> T cloneObj(T t) throws IOException
+  {
+    Serializer<T> keySerializer;
+    Class<T> keyClass;
+    PipedInputStream pis = new PipedInputStream();
+    PipedOutputStream pos = new PipedOutputStream(pis);
+    keyClass = (Class<T>)t.getClass();
+    keySerializer = serializationFactory.getSerializer(keyClass);
+    keySerializer.open(pos);
+    keySerializer.serialize(t);
+    Deserializer<T> keyDesiralizer = serializationFactory.getDeserializer(keyClass);
+    keyDesiralizer.open(pis);
+    T clonedArg0 = keyDesiralizer.deserialize(null);
+    pos.close();
+    pis.close();
+    keySerializer.close();
+    keyDesiralizer.close();
+    return clonedArg0;
 
-	}
+  }
 
-	@Override
-	public void collect(K arg0, V arg1) throws IOException {
-		if (conf == null) {
-			conf = new Configuration();
-			serializationFactory = new SerializationFactory(conf);
-		}
-		list.add(new KeyHashValPair<K, V>(cloneObj(arg0), cloneObj(arg1)));
-	}
+  @Override
+  public void collect(K arg0, V arg1) throws IOException
+  {
+    if (conf == null) {
+      conf = new Configuration();
+      serializationFactory = new SerializationFactory(conf);
+    }
+    list.add(new KeyHashValPair<K, V>(cloneObj(arg0), cloneObj(arg1)));
+  }
 }


[6/6] apex-malhar git commit: APEXMALHAR-2200 Enabled checkstyle for demos.

Posted by th...@apache.org.
APEXMALHAR-2200 Enabled checkstyle for demos.


Project: http://git-wip-us.apache.org/repos/asf/apex-malhar/repo
Commit: http://git-wip-us.apache.org/repos/asf/apex-malhar/commit/846b4a36
Tree: http://git-wip-us.apache.org/repos/asf/apex-malhar/tree/846b4a36
Diff: http://git-wip-us.apache.org/repos/asf/apex-malhar/diff/846b4a36

Branch: refs/heads/master
Commit: 846b4a36228ee8f6eb941948ef70b7c78ffb7739
Parents: 0a1adff
Author: Shunxin <lu...@hotmail.com>
Authored: Fri Aug 26 13:31:45 2016 -0700
Committer: Shunxin <lu...@hotmail.com>
Committed: Fri Aug 26 14:07:31 2016 -0700

----------------------------------------------------------------------
 demos/pom.xml | 7 -------
 1 file changed, 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/846b4a36/demos/pom.xml
----------------------------------------------------------------------
diff --git a/demos/pom.xml b/demos/pom.xml
index 3528e7a2..839a978 100644
--- a/demos/pom.xml
+++ b/demos/pom.xml
@@ -160,13 +160,6 @@
             </execution>
           </executions>
         </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-checkstyle-plugin</artifactId>
-          <configuration>
-            <skip>true</skip>
-          </configuration>
-        </plugin>
       </plugins>
 	</build>
 	</profile>


[2/6] apex-malhar git commit: Fixed checkstyle errors for demos.

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/ReduceOperator.java
----------------------------------------------------------------------
diff --git a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/ReduceOperator.java b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/ReduceOperator.java
index 32c7ccb..5df9b0d 100644
--- a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/ReduceOperator.java
+++ b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/ReduceOperator.java
@@ -25,14 +25,15 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.Counters;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultInputPort;
@@ -47,129 +48,142 @@ import com.datatorrent.lib.util.KeyHashValPair;
  * @since 0.9.0
  */
 @SuppressWarnings({ "deprecation", "unused" })
-public class ReduceOperator<K1, V1, K2, V2> implements Operator {
-	private static final Logger logger = LoggerFactory.getLogger(ReduceOperator.class);
-
-	private Class<? extends Reducer<K1, V1, K2, V2>> reduceClass;
-	private transient Reducer<K1, V1, K2, V2> reduceObj;
-	private transient Reporter reporter;
-	private OutputCollector<K2, V2> outputCollector;
-	private String configFile;
-
-	public Class<? extends Reducer<K1, V1, K2, V2>> getReduceClass() {
-		return reduceClass;
-	}
-
-	public void setReduceClass(Class<? extends Reducer<K1, V1, K2, V2>> reduceClass) {
-		this.reduceClass = reduceClass;
-	}
-
-	public String getConfigFile() {
-		return configFile;
-	}
-
-	public void setConfigFile(String configFile) {
-		this.configFile = configFile;
-	}
-
-	private int numberOfMappersRunning = -1;
-	private int operatorId;
-
-	public final transient DefaultInputPort<KeyHashValPair<Integer, Integer>> inputCount = new DefaultInputPort<KeyHashValPair<Integer, Integer>>() {
-		@Override
-		public void process(KeyHashValPair<Integer, Integer> tuple) {
-			logger.info("processing {}", tuple);
-			if (numberOfMappersRunning == -1)
-				numberOfMappersRunning = tuple.getValue();
-			else
-				numberOfMappersRunning += tuple.getValue();
-
-		}
-
-	};
-
-	public final transient DefaultOutputPort<KeyHashValPair<K2, V2>> output = new DefaultOutputPort<KeyHashValPair<K2, V2>>();
-	private Map<K1, List<V1>> cacheObject;
-	public final transient DefaultInputPort<KeyHashValPair<K1, V1>> input = new DefaultInputPort<KeyHashValPair<K1, V1>>() {
-
-		@Override
-		public void process(KeyHashValPair<K1, V1> tuple) {
-			// logger.info("processing tupple {}",tuple);
-			List<V1> list = cacheObject.get(tuple.getKey());
-			if (list == null) {
-				list = new ArrayList<V1>();
-				list.add(tuple.getValue());
-				cacheObject.put(tuple.getKey(), list);
-			} else {
-				list.add(tuple.getValue());
-			}
-		}
-
-	};
-
-	@Override
-	public void setup(OperatorContext context) {
-		reporter = new ReporterImpl(ReporterType.Reducer, new Counters());
-		if(context != null){
-			operatorId = context.getId();
-		}
-		cacheObject = new HashMap<K1, List<V1>>();
-		outputCollector = new OutputCollectorImpl<K2, V2>();
-		if (reduceClass != null) {
-			try {
-				reduceObj = reduceClass.newInstance();
-			} catch (Exception e) {
-				logger.info("can't instantiate object {}", e.getMessage());
-				throw new RuntimeException(e);
-			}
-			Configuration conf = new Configuration();
-			InputStream stream = null;
-			if (configFile != null && configFile.length() > 0) {
-				logger.info("system /{}", configFile);
-				stream = ClassLoader.getSystemResourceAsStream("/" + configFile);
-				if (stream == null) {
-					logger.info("system {}", configFile);
-					stream = ClassLoader.getSystemResourceAsStream(configFile);
-				}
-			}
-			if (stream != null) {
-				logger.info("found our stream... so adding it");
-				conf.addResource(stream);
-			}
-			reduceObj.configure(new JobConf(conf));
-		}
-
-	}
-
-	@Override
-	public void teardown() {
-
-	}
-
-	@Override
-	public void beginWindow(long windowId) {
-
-	}
-
-	@Override
-	public void endWindow() {
-		if (numberOfMappersRunning == 0) {
-			for (Map.Entry<K1, List<V1>> e : cacheObject.entrySet()) {
-				try {
-					reduceObj.reduce(e.getKey(), e.getValue().iterator(), outputCollector, reporter);
-				} catch (IOException e1) {
-					logger.info(e1.getMessage());
-					throw new RuntimeException(e1);
-				}
-			}
-			List<KeyHashValPair<K2, V2>> list = ((OutputCollectorImpl<K2, V2>) outputCollector).getList();
-			for (KeyHashValPair<K2, V2> e : list) {
-				output.emit(e);				
-			}
-			list.clear();
-			cacheObject.clear();
-			numberOfMappersRunning = -1;
-		}
-	}
+public class ReduceOperator<K1, V1, K2, V2> implements Operator
+{
+  private static final Logger logger = LoggerFactory.getLogger(ReduceOperator.class);
+
+  private Class<? extends Reducer<K1, V1, K2, V2>> reduceClass;
+  private transient Reducer<K1, V1, K2, V2> reduceObj;
+  private transient Reporter reporter;
+  private OutputCollector<K2, V2> outputCollector;
+  private String configFile;
+
+  public Class<? extends Reducer<K1, V1, K2, V2>> getReduceClass()
+  {
+    return reduceClass;
+  }
+
+  public void setReduceClass(Class<? extends Reducer<K1, V1, K2, V2>> reduceClass)
+  {
+    this.reduceClass = reduceClass;
+  }
+
+  public String getConfigFile()
+  {
+    return configFile;
+  }
+
+  public void setConfigFile(String configFile)
+  {
+    this.configFile = configFile;
+  }
+
+  private int numberOfMappersRunning = -1;
+  private int operatorId;
+
+  public final transient DefaultInputPort<KeyHashValPair<Integer, Integer>> inputCount = new DefaultInputPort<KeyHashValPair<Integer, Integer>>()
+  {
+    @Override
+    public void process(KeyHashValPair<Integer, Integer> tuple)
+    {
+      logger.info("processing {}", tuple);
+      if (numberOfMappersRunning == -1) {
+        numberOfMappersRunning = tuple.getValue();
+      } else {
+        numberOfMappersRunning += tuple.getValue();
+      }
+
+    }
+
+  };
+
+  public final transient DefaultOutputPort<KeyHashValPair<K2, V2>> output = new DefaultOutputPort<KeyHashValPair<K2, V2>>();
+  private Map<K1, List<V1>> cacheObject;
+  public final transient DefaultInputPort<KeyHashValPair<K1, V1>> input = new DefaultInputPort<KeyHashValPair<K1, V1>>()
+  {
+    @Override
+    public void process(KeyHashValPair<K1, V1> tuple)
+    {
+      // logger.info("processing tupple {}",tuple);
+      List<V1> list = cacheObject.get(tuple.getKey());
+      if (list == null) {
+        list = new ArrayList<V1>();
+        list.add(tuple.getValue());
+        cacheObject.put(tuple.getKey(), list);
+      } else {
+        list.add(tuple.getValue());
+      }
+    }
+
+  };
+
+  @Override
+  public void setup(OperatorContext context)
+  {
+    reporter = new ReporterImpl(ReporterType.Reducer, new Counters());
+    if (context != null) {
+      operatorId = context.getId();
+    }
+    cacheObject = new HashMap<K1, List<V1>>();
+    outputCollector = new OutputCollectorImpl<K2, V2>();
+    if (reduceClass != null) {
+      try {
+        reduceObj = reduceClass.newInstance();
+      } catch (Exception e) {
+        logger.info("can't instantiate object {}", e.getMessage());
+        throw new RuntimeException(e);
+      }
+      Configuration conf = new Configuration();
+      InputStream stream = null;
+      if (configFile != null && configFile.length() > 0) {
+        logger.info("system /{}", configFile);
+        stream = ClassLoader.getSystemResourceAsStream("/" + configFile);
+        if (stream == null) {
+          logger.info("system {}", configFile);
+          stream = ClassLoader.getSystemResourceAsStream(configFile);
+        }
+      }
+      if (stream != null) {
+        logger.info("found our stream... so adding it");
+        conf.addResource(stream);
+      }
+      reduceObj.configure(new JobConf(conf));
+    }
+
+  }
+
+  @Override
+  public void teardown()
+  {
+
+  }
+
+  @Override
+  public void beginWindow(long windowId)
+  {
+
+  }
+
+  @Override
+  public void endWindow()
+  {
+    if (numberOfMappersRunning == 0) {
+      for (Map.Entry<K1, List<V1>> e : cacheObject.entrySet()) {
+        try {
+          reduceObj.reduce(e.getKey(), e.getValue().iterator(), outputCollector, reporter);
+        } catch (IOException e1) {
+          logger.info(e1.getMessage());
+          throw new RuntimeException(e1);
+        }
+      }
+      List<KeyHashValPair<K2, V2>> list = ((OutputCollectorImpl<K2, V2>)outputCollector).getList();
+      for (KeyHashValPair<K2, V2> e : list) {
+        output.emit(e);
+      }
+      list.clear();
+      cacheObject.clear();
+      numberOfMappersRunning = -1;
+    }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/ReporterImpl.java
----------------------------------------------------------------------
diff --git a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/ReporterImpl.java b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/ReporterImpl.java
index 1eb3bdd..d2d38da 100644
--- a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/ReporterImpl.java
+++ b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/ReporterImpl.java
@@ -18,8 +18,8 @@
  */
 package com.datatorrent.demos.mroperator;
 
-import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.hadoop.mapred.Counters;
+import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.Reporter;
 
@@ -28,81 +28,92 @@ import org.apache.hadoop.mapred.Reporter;
  *
  * @since 0.9.0
  */
-public class ReporterImpl implements Reporter {
-
-	private Counters counters;
-	InputSplit inputSplit;
-
-	public enum ReporterType {
-		Mapper, Reducer
-	}
-
-	private ReporterType typ;
-
-	public ReporterImpl(final ReporterType kind, final Counters ctrs) {
-		this.typ = kind;
-		this.counters = ctrs;
-	}
-
-	@Override
-	public InputSplit getInputSplit() {
-		if (typ == ReporterType.Reducer) {
-			throw new UnsupportedOperationException("Reducer cannot call getInputSplit()");
-		} else {
-			return inputSplit;
-		}
-	}
-
-	public void setInputSplit(InputSplit inputSplit) {
-		this.inputSplit = inputSplit;
-	}
-
-	@Override
-	public void incrCounter(Enum<?> key, long amount) {
-		if (null != counters) {
-			counters.incrCounter(key, amount);
-		}
-	}
-
-	@Override
-	public void incrCounter(String group, String counter, long amount) {
-		if (null != counters) {
-			counters.incrCounter(group, counter, amount);
-		}
-	}
-
-	@Override
-	public void setStatus(String status) {
-		// do nothing.
-	}
-
-	@Override
-	public void progress() {
-		// do nothing.
-	}
-
-	@Override
-	public Counter getCounter(String group, String name) {
-		Counters.Counter counter = null;
-		if (counters != null) {
-			counter = counters.findCounter(group, name);
-		}
-
-		return counter;
-	}
-
-	@Override
-	public Counter getCounter(Enum<?> key) {
-		Counters.Counter counter = null;
-		if (counters != null) {
-			counter = counters.findCounter(key);
-		}
-
-		return counter;
-	}
-
-	public float getProgress() {
-		return 0;
-	}
+public class ReporterImpl implements Reporter
+{
+  private Counters counters;
+  InputSplit inputSplit;
+
+  public enum ReporterType
+  {
+    Mapper, Reducer
+  }
+
+  private ReporterType typ;
+
+  public ReporterImpl(final ReporterType kind, final Counters ctrs)
+  {
+    this.typ = kind;
+    this.counters = ctrs;
+  }
+
+  @Override
+  public InputSplit getInputSplit()
+  {
+    if (typ == ReporterType.Reducer) {
+      throw new UnsupportedOperationException("Reducer cannot call getInputSplit()");
+    } else {
+      return inputSplit;
+    }
+  }
+
+  public void setInputSplit(InputSplit inputSplit)
+  {
+    this.inputSplit = inputSplit;
+  }
+
+  @Override
+  public void incrCounter(Enum<?> key, long amount)
+  {
+    if (null != counters) {
+      counters.incrCounter(key, amount);
+    }
+  }
+
+  @Override
+  public void incrCounter(String group, String counter, long amount)
+  {
+    if (null != counters) {
+      counters.incrCounter(group, counter, amount);
+    }
+  }
+
+  @Override
+  public void setStatus(String status)
+  {
+    // do nothing.
+  }
+
+  @Override
+  public void progress()
+  {
+    // do nothing.
+  }
+
+  @Override
+  public Counter getCounter(String group, String name)
+  {
+    Counters.Counter counter = null;
+    if (counters != null) {
+      counter = counters.findCounter(group, name);
+    }
+
+    return counter;
+  }
+
+  @Override
+  public Counter getCounter(Enum<?> key)
+  {
+    Counters.Counter counter = null;
+    if (counters != null) {
+      counter = counters.findCounter(key);
+    }
+
+    return counter;
+  }
+
+  public float getProgress()
+  {
+    return 0;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/WordCount.java
----------------------------------------------------------------------
diff --git a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/WordCount.java b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/WordCount.java
index d5cbdb0..f78cf99 100644
--- a/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/WordCount.java
+++ b/demos/mroperator/src/main/java/com/datatorrent/demos/mroperator/WordCount.java
@@ -19,13 +19,24 @@
 package com.datatorrent.demos.mroperator;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.Iterator;
+import java.util.StringTokenizer;
 
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.conf.*;
-import org.apache.hadoop.io.*;
-import org.apache.hadoop.mapred.*;
-import org.apache.hadoop.util.*;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.mapred.TextOutputFormat;
 
 /**
  * <p>WordCount class.</p>
@@ -38,7 +49,7 @@ public class WordCount
 
   public static class Map extends MapReduceBase implements Mapper<LongWritable, Text, Text, IntWritable>
   {
-    private final static IntWritable one = new IntWritable(1);
+    private static final IntWritable one = new IntWritable(1);
     private Text word = new Text();
 
     public void map(LongWritable key, Text value, OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mroperator/src/test/java/com/datatorrent/demos/mroperator/MapOperatorTest.java
----------------------------------------------------------------------
diff --git a/demos/mroperator/src/test/java/com/datatorrent/demos/mroperator/MapOperatorTest.java b/demos/mroperator/src/test/java/com/datatorrent/demos/mroperator/MapOperatorTest.java
index e8c71c3..0f330e8 100644
--- a/demos/mroperator/src/test/java/com/datatorrent/demos/mroperator/MapOperatorTest.java
+++ b/demos/mroperator/src/test/java/com/datatorrent/demos/mroperator/MapOperatorTest.java
@@ -23,6 +23,15 @@ import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
 
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestWatcher;
+import org.junit.runner.Description;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -35,13 +44,6 @@ import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.TextInputFormat;
-import org.junit.Assert;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestWatcher;
-import org.junit.runner.Description;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import com.datatorrent.lib.testbench.CollectorTestSink;
 
@@ -118,8 +120,7 @@ public class MapOperatorTest
       testDir = baseDir + "/" + methodName;
       try {
         FileUtils.forceMkdir(new File(testDir));
-      }
-      catch (IOException ex) {
+      } catch (IOException ex) {
         throw new RuntimeException(ex);
       }
       createFile(testDir + "/" + file1, "1\n2\n3\n1\n2\n3\n");
@@ -131,16 +132,13 @@ public class MapOperatorTest
       try {
         output = new BufferedWriter(new FileWriter(new File(fileName)));
         output.write(data);
-      }
-      catch (IOException ex) {
+      } catch (IOException ex) {
         throw new RuntimeException(ex);
-      }
-      finally {
+      } finally {
         if (output != null) {
           try {
             output.close();
-          }
-          catch (IOException ex) {
+          } catch (IOException ex) {
             LOG.error("not able to close the output stream: ", ex);
           }
         }
@@ -152,8 +150,7 @@ public class MapOperatorTest
     {
       try {
         FileUtils.deleteDirectory(new File(baseDir));
-      }
-      catch (IOException ex) {
+      } catch (IOException ex) {
         throw new RuntimeException(ex);
       }
     }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mroperator/src/test/java/com/datatorrent/demos/mroperator/ReduceOperatorTest.java
----------------------------------------------------------------------
diff --git a/demos/mroperator/src/test/java/com/datatorrent/demos/mroperator/ReduceOperatorTest.java b/demos/mroperator/src/test/java/com/datatorrent/demos/mroperator/ReduceOperatorTest.java
index 9ad5637..b85f8ad 100644
--- a/demos/mroperator/src/test/java/com/datatorrent/demos/mroperator/ReduceOperatorTest.java
+++ b/demos/mroperator/src/test/java/com/datatorrent/demos/mroperator/ReduceOperatorTest.java
@@ -18,55 +18,57 @@
  */
 package com.datatorrent.demos.mroperator;
 
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.Text;
+
 import com.datatorrent.lib.testbench.CollectorTestSink;
 import com.datatorrent.lib.util.KeyHashValPair;
 
-public class ReduceOperatorTest {
-
-	 private static Logger logger = LoggerFactory.getLogger(ReduceOperatorTest.class);
-
-	/**
-	 * Test node logic emits correct results
-	 */
-	@Test
-	public void testNodeProcessing() throws Exception {
-		testNodeProcessingSchema(new ReduceOperator<Text, IntWritable,Text, IntWritable>());
-	}
+public class ReduceOperatorTest
+{
+  private static Logger logger = LoggerFactory.getLogger(ReduceOperatorTest.class);
 
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	public void testNodeProcessingSchema(ReduceOperator<Text, IntWritable,Text, IntWritable> oper) {
+  /**
+   * Test node logic emits correct results
+   */
+  @Test
+  public void testNodeProcessing() throws Exception
+  {
+    testNodeProcessingSchema(new ReduceOperator<Text, IntWritable,Text, IntWritable>());
+  }
 
-		oper.setReduceClass(WordCount.Reduce.class);
-		oper.setConfigFile(null);
-		oper.setup(null);
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  public void testNodeProcessingSchema(ReduceOperator<Text, IntWritable,Text, IntWritable> oper)
+  {
+    oper.setReduceClass(WordCount.Reduce.class);
+    oper.setConfigFile(null);
+    oper.setup(null);
 
-		CollectorTestSink sortSink = new CollectorTestSink();
+    CollectorTestSink sortSink = new CollectorTestSink();
     oper.output.setSink(sortSink);
 
-		oper.beginWindow(0);
-		oper.inputCount.process(new KeyHashValPair<Integer, Integer>(1, 1));
-		oper.input.process(new KeyHashValPair<Text, IntWritable>(new Text("one"), new IntWritable(1)));
-		oper.input.process(new KeyHashValPair<Text, IntWritable>(new Text("one"), new IntWritable(1)));
-		oper.input.process(new KeyHashValPair<Text, IntWritable>(new Text("two"), new IntWritable(1)));
-		oper.endWindow();
+    oper.beginWindow(0);
+    oper.inputCount.process(new KeyHashValPair<Integer, Integer>(1, 1));
+    oper.input.process(new KeyHashValPair<Text, IntWritable>(new Text("one"), new IntWritable(1)));
+    oper.input.process(new KeyHashValPair<Text, IntWritable>(new Text("one"), new IntWritable(1)));
+    oper.input.process(new KeyHashValPair<Text, IntWritable>(new Text("two"), new IntWritable(1)));
+    oper.endWindow();
 
-		oper.beginWindow(1);
-		oper.input.process(new KeyHashValPair<Text, IntWritable>(new Text("one"), new IntWritable(1)));
-		oper.input.process(new KeyHashValPair<Text, IntWritable>(new Text("two"), new IntWritable(1)));
-		oper.input.process(new KeyHashValPair<Text, IntWritable>(new Text("two"), new IntWritable(1)));
-		oper.inputCount.process(new KeyHashValPair<Integer, Integer>(1, -1));
-		oper.endWindow();
-		Assert.assertEquals("number emitted tuples", 2, sortSink.collectedTuples.size());
-		for (Object o : sortSink.collectedTuples) {
+    oper.beginWindow(1);
+    oper.input.process(new KeyHashValPair<Text, IntWritable>(new Text("one"), new IntWritable(1)));
+    oper.input.process(new KeyHashValPair<Text, IntWritable>(new Text("two"), new IntWritable(1)));
+    oper.input.process(new KeyHashValPair<Text, IntWritable>(new Text("two"), new IntWritable(1)));
+    oper.inputCount.process(new KeyHashValPair<Integer, Integer>(1, -1));
+    oper.endWindow();
+    Assert.assertEquals("number emitted tuples", 2, sortSink.collectedTuples.size());
+    for (Object o : sortSink.collectedTuples) {
       logger.debug(o.toString());
     }
     logger.debug("Done testing round\n");
-	}
+  }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/mroperator/src/test/java/com/datatorrent/demos/mroperator/WordCountMRApplicationTest.java
----------------------------------------------------------------------
diff --git a/demos/mroperator/src/test/java/com/datatorrent/demos/mroperator/WordCountMRApplicationTest.java b/demos/mroperator/src/test/java/com/datatorrent/demos/mroperator/WordCountMRApplicationTest.java
index cb1521a..bd732c1 100644
--- a/demos/mroperator/src/test/java/com/datatorrent/demos/mroperator/WordCountMRApplicationTest.java
+++ b/demos/mroperator/src/test/java/com/datatorrent/demos/mroperator/WordCountMRApplicationTest.java
@@ -23,14 +23,15 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+
 import com.google.common.collect.Maps;
 
 import com.datatorrent.api.LocalMode;
@@ -58,7 +59,7 @@ public class WordCountMRApplicationTest
     List<String> readLines = FileUtils.readLines(new File(testMeta.testDir + "/output.txt"));
     Map<String,Integer> readMap = Maps.newHashMap();
     Iterator<String> itr = readLines.iterator();
-    while(itr.hasNext()){
+    while (itr.hasNext()) {
       String[] splits = itr.next().split("=");
       readMap.put(splits[0],Integer.valueOf(splits[1]));
     }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/pi/src/main/java/com/datatorrent/demos/pi/Application.java
----------------------------------------------------------------------
diff --git a/demos/pi/src/main/java/com/datatorrent/demos/pi/Application.java b/demos/pi/src/main/java/com/datatorrent/demos/pi/Application.java
index 8f4dd92..55ffe92 100644
--- a/demos/pi/src/main/java/com/datatorrent/demos/pi/Application.java
+++ b/demos/pi/src/main/java/com/datatorrent/demos/pi/Application.java
@@ -20,13 +20,12 @@ package com.datatorrent.demos.pi;
 
 import org.apache.hadoop.conf.Configuration;
 
-import com.datatorrent.lib.io.ConsoleOutputOperator;
-import com.datatorrent.lib.testbench.RandomEventGenerator;
-
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.DAG.Locality;
 import com.datatorrent.api.StreamingApplication;
 import com.datatorrent.api.annotation.ApplicationAnnotation;
+import com.datatorrent.lib.io.ConsoleOutputOperator;
+import com.datatorrent.lib.testbench.RandomEventGenerator;
 
 /**
  * Monte Carlo PI estimation demo : <br>
@@ -75,7 +74,7 @@ import com.datatorrent.api.annotation.ApplicationAnnotation;
  *
  * @since 0.3.2
  */
-@ApplicationAnnotation(name="PiDemo")
+@ApplicationAnnotation(name = "PiDemo")
 public class Application implements StreamingApplication
 {
   private final Locality locality = null;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/pi/src/main/java/com/datatorrent/demos/pi/ApplicationAppData.java
----------------------------------------------------------------------
diff --git a/demos/pi/src/main/java/com/datatorrent/demos/pi/ApplicationAppData.java b/demos/pi/src/main/java/com/datatorrent/demos/pi/ApplicationAppData.java
index 57c5249..328bb10 100644
--- a/demos/pi/src/main/java/com/datatorrent/demos/pi/ApplicationAppData.java
+++ b/demos/pi/src/main/java/com/datatorrent/demos/pi/ApplicationAppData.java
@@ -84,7 +84,7 @@ import com.datatorrent.lib.testbench.RandomEventGenerator;
  *
  * @since 0.3.2
  */
-@ApplicationAnnotation(name="PiDemoAppData")
+@ApplicationAnnotation(name = "PiDemoAppData")
 public class ApplicationAppData implements StreamingApplication
 {
   public static final String SNAPSHOT_SCHEMA = "PiDemoDataSchema.json";

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/pi/src/main/java/com/datatorrent/demos/pi/ApplicationWithScript.java
----------------------------------------------------------------------
diff --git a/demos/pi/src/main/java/com/datatorrent/demos/pi/ApplicationWithScript.java b/demos/pi/src/main/java/com/datatorrent/demos/pi/ApplicationWithScript.java
index 3ed376f..0796608 100644
--- a/demos/pi/src/main/java/com/datatorrent/demos/pi/ApplicationWithScript.java
+++ b/demos/pi/src/main/java/com/datatorrent/demos/pi/ApplicationWithScript.java
@@ -18,16 +18,15 @@
  */
 package com.datatorrent.demos.pi;
 
-
 import org.apache.hadoop.conf.Configuration;
 
+import com.datatorrent.api.DAG;
+import com.datatorrent.api.StreamingApplication;
+import com.datatorrent.api.annotation.ApplicationAnnotation;
 import com.datatorrent.lib.io.ConsoleOutputOperator;
 import com.datatorrent.lib.script.JavaScriptOperator;
 import com.datatorrent.lib.stream.RoundRobinHashMap;
 import com.datatorrent.lib.testbench.RandomEventGenerator;
-import com.datatorrent.api.DAG;
-import com.datatorrent.api.StreamingApplication;
-import com.datatorrent.api.annotation.ApplicationAnnotation;
 
 /**
  * Monte Carlo PI estimation demo : <br>
@@ -78,7 +77,7 @@ import com.datatorrent.api.annotation.ApplicationAnnotation;
  *
  * @since 0.3.2
  */
-@ApplicationAnnotation(name="PiJavaScriptDemo")
+@ApplicationAnnotation(name = "PiJavaScriptDemo")
 public class ApplicationWithScript implements StreamingApplication
 {
 
@@ -92,13 +91,13 @@ public class ApplicationWithScript implements StreamingApplication
     rand.setMaxvalue(maxValue);
 
     RoundRobinHashMap<String,Object> rrhm = dag.addOperator("rrhm", new RoundRobinHashMap<String, Object>());
-    rrhm.setKeys(new String[] { "x", "y" });
+    rrhm.setKeys(new String[]{"x", "y"});
 
     JavaScriptOperator calc = dag.addOperator("picalc", new JavaScriptOperator());
     calc.setPassThru(false);
     calc.put("i",0);
     calc.put("count",0);
-    calc.addSetupScript("function pi() { if (x*x+y*y <= "+maxValue*maxValue+") { i++; } count++; return i / count * 4; }");
+    calc.addSetupScript("function pi() { if (x*x+y*y <= " + maxValue * maxValue + ") { i++; } count++; return i / count * 4; }");
 
     calc.setInvoke("pi");
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/pi/src/main/java/com/datatorrent/demos/pi/Calculator.java
----------------------------------------------------------------------
diff --git a/demos/pi/src/main/java/com/datatorrent/demos/pi/Calculator.java b/demos/pi/src/main/java/com/datatorrent/demos/pi/Calculator.java
index 9363b88..221ecc0 100644
--- a/demos/pi/src/main/java/com/datatorrent/demos/pi/Calculator.java
+++ b/demos/pi/src/main/java/com/datatorrent/demos/pi/Calculator.java
@@ -40,7 +40,7 @@ import com.datatorrent.lib.testbench.RandomEventGenerator;
  *
  * @since 0.3.2
  */
-@ApplicationAnnotation(name="PiLibraryDemo")
+@ApplicationAnnotation(name = "PiLibraryDemo")
 public class Calculator implements StreamingApplication
 {
   @Override
@@ -56,7 +56,7 @@ public class Calculator implements StreamingApplication
     AbstractAggregator<Integer> pairOperator = dag.addOperator("PairXY", new ArrayListAggregator<Integer>());
     Sigma<Integer> sumOperator = dag.addOperator("SumXY", new Sigma<Integer>());
     LogicalCompareToConstant<Integer> comparator = dag.addOperator("AnalyzeLocation", new LogicalCompareToConstant<Integer>());
-    comparator.setConstant(30000 *30000);
+    comparator.setConstant(30000 * 30000);
     Counter inCircle = dag.addOperator("CountInCircle", Counter.class);
     Counter inSquare = dag.addOperator("CountInSquare", Counter.class);
     Division division = dag.addOperator("Ratio", Division.class);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/pi/src/main/java/com/datatorrent/demos/pi/NamedValueList.java
----------------------------------------------------------------------
diff --git a/demos/pi/src/main/java/com/datatorrent/demos/pi/NamedValueList.java b/demos/pi/src/main/java/com/datatorrent/demos/pi/NamedValueList.java
index ce5ef9d..c50e17e 100644
--- a/demos/pi/src/main/java/com/datatorrent/demos/pi/NamedValueList.java
+++ b/demos/pi/src/main/java/com/datatorrent/demos/pi/NamedValueList.java
@@ -25,10 +25,10 @@ import java.util.Map;
 
 import javax.validation.constraints.NotNull;
 
-import com.datatorrent.common.util.BaseOperator;
+import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * <p>An operator which converts a raw value to a named value singleton list.</p>
@@ -47,9 +47,11 @@ public class NamedValueList<T> extends BaseOperator
   private List<Map<String, T>> valueList;
   private Map<String, T> valueMap;
 
-  public final transient DefaultInputPort<T> inPort = new DefaultInputPort<T>() {
+  public final transient DefaultInputPort<T> inPort = new DefaultInputPort<T>()
+  {
     @Override
-    public void process(T val) {
+    public void process(T val)
+    {
       valueMap.put(valueName, val);
       outPort.emit(valueList);
     }
@@ -80,11 +82,13 @@ public class NamedValueList<T> extends BaseOperator
   {
   }
 
-  public String getValueName() {
+  public String getValueName()
+  {
     return valueName;
   }
 
-  public void setValueName(String name) {
+  public void setValueName(String name)
+  {
     valueName = name;
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/pi/src/main/java/com/datatorrent/demos/pi/PiCalculateOperator.java
----------------------------------------------------------------------
diff --git a/demos/pi/src/main/java/com/datatorrent/demos/pi/PiCalculateOperator.java b/demos/pi/src/main/java/com/datatorrent/demos/pi/PiCalculateOperator.java
index 14edf19..8e61991 100644
--- a/demos/pi/src/main/java/com/datatorrent/demos/pi/PiCalculateOperator.java
+++ b/demos/pi/src/main/java/com/datatorrent/demos/pi/PiCalculateOperator.java
@@ -21,10 +21,10 @@ package com.datatorrent.demos.pi;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * This operator implements Monte Carlo estimation of pi. For points randomly distributed points on
@@ -46,8 +46,7 @@ public class PiCalculateOperator extends BaseOperator
     {
       if (x == -1) {
         x = tuple;
-      }
-      else {
+      } else {
         y = tuple;
         if (x * x + y * y <= base) {
           inArea++;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/pi/src/test/java/com/datatorrent/demos/pi/ApplicationTest.java
----------------------------------------------------------------------
diff --git a/demos/pi/src/test/java/com/datatorrent/demos/pi/ApplicationTest.java b/demos/pi/src/test/java/com/datatorrent/demos/pi/ApplicationTest.java
index b61077a..d8881c2 100644
--- a/demos/pi/src/test/java/com/datatorrent/demos/pi/ApplicationTest.java
+++ b/demos/pi/src/test/java/com/datatorrent/demos/pi/ApplicationTest.java
@@ -18,13 +18,11 @@
  */
 package com.datatorrent.demos.pi;
 
-
-import org.apache.hadoop.conf.Configuration;
 import org.junit.Test;
+import org.apache.hadoop.conf.Configuration;
 
 import com.datatorrent.api.LocalMode;
 
-
 /**
  *
  */
@@ -33,12 +31,12 @@ public class ApplicationTest
   @Test
   public void testSomeMethod() throws Exception
   {
-	  LocalMode lma = LocalMode.newInstance();
-	    Configuration conf =new Configuration(false);
-	    conf.addResource("dt-site-pi.xml");
-	    lma.prepareDAG(new Application(), conf);
-	    LocalMode.Controller lc = lma.getController();
-	    lc.run(10000);
-  
+    LocalMode lma = LocalMode.newInstance();
+    Configuration conf = new Configuration(false);
+    conf.addResource("dt-site-pi.xml");
+    lma.prepareDAG(new Application(), conf);
+    LocalMode.Controller lc = lma.getController();
+    lc.run(10000);
+
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/pi/src/test/java/com/datatorrent/demos/pi/CalculatorTest.java
----------------------------------------------------------------------
diff --git a/demos/pi/src/test/java/com/datatorrent/demos/pi/CalculatorTest.java b/demos/pi/src/test/java/com/datatorrent/demos/pi/CalculatorTest.java
index cd52873..8e12fcc 100644
--- a/demos/pi/src/test/java/com/datatorrent/demos/pi/CalculatorTest.java
+++ b/demos/pi/src/test/java/com/datatorrent/demos/pi/CalculatorTest.java
@@ -18,9 +18,8 @@
  */
 package com.datatorrent.demos.pi;
 
-
-import org.apache.hadoop.conf.Configuration;
 import org.junit.Test;
+import org.apache.hadoop.conf.Configuration;
 
 import com.datatorrent.api.LocalMode;
 
@@ -33,7 +32,7 @@ public class CalculatorTest
   public void testSomeMethod() throws Exception
   { 
     LocalMode lma = LocalMode.newInstance();
-    Configuration conf =new Configuration(false);
+    Configuration conf = new Configuration(false);
     conf.addResource("dt-site-pilibrary.xml");
     lma.prepareDAG(new Calculator(), conf);
     LocalMode.Controller lc = lma.getController();

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/r/src/main/java/com/datatorrent/demos/r/oldfaithful/FaithfulRScript.java
----------------------------------------------------------------------
diff --git a/demos/r/src/main/java/com/datatorrent/demos/r/oldfaithful/FaithfulRScript.java b/demos/r/src/main/java/com/datatorrent/demos/r/oldfaithful/FaithfulRScript.java
index 8558554..cf49848 100755
--- a/demos/r/src/main/java/com/datatorrent/demos/r/oldfaithful/FaithfulRScript.java
+++ b/demos/r/src/main/java/com/datatorrent/demos/r/oldfaithful/FaithfulRScript.java
@@ -25,11 +25,10 @@ import java.util.List;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.datatorrent.contrib.r.RScript;
-
 import com.datatorrent.api.Context;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.annotation.InputPortFieldAnnotation;
+import com.datatorrent.contrib.r.RScript;
 
 /**
  * @since 2.1.0
@@ -52,7 +51,8 @@ public class FaithfulRScript extends RScript
   }
 
   @InputPortFieldAnnotation(optional = true)
-  public final transient DefaultInputPort<FaithfulKey> faithfulInput = new DefaultInputPort<FaithfulKey>() {
+  public final transient DefaultInputPort<FaithfulKey> faithfulInput = new DefaultInputPort<FaithfulKey>()
+  {
     @Override
     public void process(FaithfulKey tuple)
     {
@@ -65,7 +65,8 @@ public class FaithfulRScript extends RScript
   };
 
   @InputPortFieldAnnotation(optional = true)
-  public final transient DefaultInputPort<Integer> inputElapsedTime = new DefaultInputPort<Integer>() {
+  public final transient DefaultInputPort<Integer> inputElapsedTime = new DefaultInputPort<Integer>()
+  {
     @Override
     public void process(Integer eT)
     {
@@ -82,9 +83,9 @@ public class FaithfulRScript extends RScript
   @Override
   public void endWindow()
   {
-
-    if (readingsList.size() == 0)
+    if (readingsList.size() == 0) {
       return;
+    }
     LOG.info("Input data size: readingsList - " + readingsList.size());
 
     double[] eruptionDuration = new double[readingsList.size()];
@@ -106,6 +107,5 @@ public class FaithfulRScript extends RScript
     super.process(map);
     readingsList.clear();
     map.clear();
-
-  };
+  }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/r/src/main/java/com/datatorrent/demos/r/oldfaithful/InputGenerator.java
----------------------------------------------------------------------
diff --git a/demos/r/src/main/java/com/datatorrent/demos/r/oldfaithful/InputGenerator.java b/demos/r/src/main/java/com/datatorrent/demos/r/oldfaithful/InputGenerator.java
index 86abba7..c45cd50 100755
--- a/demos/r/src/main/java/com/datatorrent/demos/r/oldfaithful/InputGenerator.java
+++ b/demos/r/src/main/java/com/datatorrent/demos/r/oldfaithful/InputGenerator.java
@@ -82,8 +82,9 @@ public class InputGenerator implements InputOperator
   {
     int id;
     do {
-      id = (int) Math.abs(Math.round(random.nextGaussian() * max));
-    } while (id >= max);
+      id = (int)Math.abs(Math.round(random.nextGaussian() * max));
+    }
+    while (id >= max);
 
     if (id < min) {
       id = min;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/r/src/main/java/com/datatorrent/demos/r/oldfaithful/OldFaithfulApplication.java
----------------------------------------------------------------------
diff --git a/demos/r/src/main/java/com/datatorrent/demos/r/oldfaithful/OldFaithfulApplication.java b/demos/r/src/main/java/com/datatorrent/demos/r/oldfaithful/OldFaithfulApplication.java
index 400e80c..0483767 100755
--- a/demos/r/src/main/java/com/datatorrent/demos/r/oldfaithful/OldFaithfulApplication.java
+++ b/demos/r/src/main/java/com/datatorrent/demos/r/oldfaithful/OldFaithfulApplication.java
@@ -23,11 +23,10 @@ import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
 
-import com.datatorrent.lib.io.ConsoleOutputOperator;
-
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.StreamingApplication;
 import com.datatorrent.api.annotation.ApplicationAnnotation;
+import com.datatorrent.lib.io.ConsoleOutputOperator;
 
 /**
  * The application attempts to simulate 'Old Faithful Geyser" eruption.
@@ -38,7 +37,7 @@ import com.datatorrent.api.annotation.ApplicationAnnotation;
  * waiting times and eruption duration values.
  * For every application window, it generates only one 'elapsed time' input for which the
  * prediction would be made.
- * Model in R is in file ruptionModel.R located at 
+ * Model in R is in file ruptionModel.R located at
  * demos/r/src/main/resources/com/datatorrent/demos/oldfaithful/ directory
  *
  * @since 2.1.0

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/r/src/test/java/com/datatorrent/demos/r/oldfaithful/OldFaithfulApplicationTest.java
----------------------------------------------------------------------
diff --git a/demos/r/src/test/java/com/datatorrent/demos/r/oldfaithful/OldFaithfulApplicationTest.java b/demos/r/src/test/java/com/datatorrent/demos/r/oldfaithful/OldFaithfulApplicationTest.java
index dc6a8cb..0bb1901 100755
--- a/demos/r/src/test/java/com/datatorrent/demos/r/oldfaithful/OldFaithfulApplicationTest.java
+++ b/demos/r/src/test/java/com/datatorrent/demos/r/oldfaithful/OldFaithfulApplicationTest.java
@@ -29,7 +29,7 @@ import com.datatorrent.api.LocalMode;
 
 public class OldFaithfulApplicationTest
 {
-  
+
   private static final Logger LOG = LoggerFactory.getLogger(OldFaithfulApplicationTest.class);
 
   @Test

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/twitter/src/main/java/com/datatorrent/demos/twitter/KinesisHashtagsApplication.java
----------------------------------------------------------------------
diff --git a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/KinesisHashtagsApplication.java b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/KinesisHashtagsApplication.java
index fd2a430..b9d32ab 100644
--- a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/KinesisHashtagsApplication.java
+++ b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/KinesisHashtagsApplication.java
@@ -18,6 +18,9 @@
  */
 package com.datatorrent.demos.twitter;
 
+import java.net.URI;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.DAG.Locality;
 import com.datatorrent.api.Operator.InputPort;
@@ -31,10 +34,7 @@ import com.datatorrent.contrib.twitter.TwitterSampleInput;
 import com.datatorrent.lib.algo.UniqueCounter;
 import com.datatorrent.lib.io.ConsoleOutputOperator;
 import com.datatorrent.lib.io.PubSubWebSocketOutputOperator;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
 
-import java.net.URI;
 /**
  * Twitter Demo Application: <br>
  * This demo application samples random public status from twitter, send to Hashtag
@@ -167,7 +167,7 @@ import java.net.URI;
  *
  * @since 2.0.0
  */
-@ApplicationAnnotation(name="TwitterKinesisDemo")
+@ApplicationAnnotation(name = "TwitterKinesisDemo")
 public class KinesisHashtagsApplication implements StreamingApplication
 {
   private final Locality locality = null;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/twitter/src/main/java/com/datatorrent/demos/twitter/SlidingContainer.java
----------------------------------------------------------------------
diff --git a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/SlidingContainer.java b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/SlidingContainer.java
index 9bd81a4..8b9f447 100644
--- a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/SlidingContainer.java
+++ b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/SlidingContainer.java
@@ -32,7 +32,7 @@ public class SlidingContainer<T> implements Serializable
   T identifier;
   int totalCount;
   int position;
-  int windowedCount[];
+  int[] windowedCount;
 
   @SuppressWarnings("unused")
   private SlidingContainer()

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterDumpApplication.java
----------------------------------------------------------------------
diff --git a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterDumpApplication.java b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterDumpApplication.java
index f61f5be..9edce64 100644
--- a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterDumpApplication.java
+++ b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterDumpApplication.java
@@ -24,8 +24,6 @@ import java.sql.SQLException;
 import javax.annotation.Nonnull;
 
 import org.apache.hadoop.conf.Configuration;
-import twitter4j.Status;
-
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.DAG.Locality;
 import com.datatorrent.api.StreamingApplication;
@@ -34,6 +32,8 @@ import com.datatorrent.api.annotation.ApplicationAnnotation;
 import com.datatorrent.contrib.twitter.TwitterSampleInput;
 import com.datatorrent.lib.db.jdbc.AbstractJdbcTransactionableOutputOperator;
 
+import twitter4j.Status;
+
 /**
  * An application which connects to Twitter Sample Input and stores all the
  * tweets with their usernames in a mysql database. Please review the docs
@@ -63,7 +63,7 @@ import com.datatorrent.lib.db.jdbc.AbstractJdbcTransactionableOutputOperator;
  *
  * @since 0.9.4
  */
-@ApplicationAnnotation(name="TwitterDumpDemo")
+@ApplicationAnnotation(name = "TwitterDumpDemo")
 public class TwitterDumpApplication implements StreamingApplication
 {
   public static class Status2Database extends AbstractJdbcTransactionableOutputOperator<Status>

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterDumpHBaseApplication.java
----------------------------------------------------------------------
diff --git a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterDumpHBaseApplication.java b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterDumpHBaseApplication.java
index ecc412f..3adbbe0 100644
--- a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterDumpHBaseApplication.java
+++ b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterDumpHBaseApplication.java
@@ -23,14 +23,14 @@ import java.nio.ByteBuffer;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.Put;
 
-import com.datatorrent.contrib.hbase.AbstractHBasePutOutputOperator;
-import com.datatorrent.contrib.twitter.TwitterSampleInput;
-
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.DAG.Locality;
 import com.datatorrent.api.StreamingApplication;
 import com.datatorrent.api.annotation.ApplicationAnnotation;
 
+import com.datatorrent.contrib.hbase.AbstractHBasePutOutputOperator;
+import com.datatorrent.contrib.twitter.TwitterSampleInput;
+
 import twitter4j.Status;
 
 /**
@@ -47,7 +47,7 @@ import twitter4j.Status;
  *
  * @since 1.0.2
  */
-@ApplicationAnnotation(name="TwitterDumpHBaseDemo")
+@ApplicationAnnotation(name = "TwitterDumpHBaseDemo")
 public class TwitterDumpHBaseApplication implements StreamingApplication
 {
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterStatusHashtagExtractor.java
----------------------------------------------------------------------
diff --git a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterStatusHashtagExtractor.java b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterStatusHashtagExtractor.java
index 5ed6774..d22db40 100644
--- a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterStatusHashtagExtractor.java
+++ b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterStatusHashtagExtractor.java
@@ -18,12 +18,12 @@
  */
 package com.datatorrent.demos.twitter;
 
-import twitter4j.HashtagEntity;
-import twitter4j.Status;
-
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
+
+import twitter4j.HashtagEntity;
+import twitter4j.Status;
 
 /**
  * <p>TwitterStatusHashtagExtractor class.</p>

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterStatusURLExtractor.java
----------------------------------------------------------------------
diff --git a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterStatusURLExtractor.java b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterStatusURLExtractor.java
index ed4e207..6dbc436 100644
--- a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterStatusURLExtractor.java
+++ b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterStatusURLExtractor.java
@@ -18,12 +18,13 @@
  */
 package com.datatorrent.demos.twitter;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.DefaultOutputPort;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
+
 import twitter4j.Status;
 import twitter4j.URLEntity;
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterStatusWordExtractor.java
----------------------------------------------------------------------
diff --git a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterStatusWordExtractor.java b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterStatusWordExtractor.java
index 1818dca..e05a37a 100644
--- a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterStatusWordExtractor.java
+++ b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterStatusWordExtractor.java
@@ -18,14 +18,14 @@
  */
 package com.datatorrent.demos.twitter;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
-
 import java.util.Arrays;
 import java.util.HashSet;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
+
 /**
  * <p>TwitterStatusWordExtractor class.</p>
  *
@@ -41,7 +41,7 @@ public class TwitterStatusWordExtractor extends BaseOperator
     @Override
     public void process(String text)
     {
-      String strs[] = text.split(" ");
+      String[] strs = text.split(" ");
       if (strs != null) {
         for (String str : strs) {
           if (str != null && !filterList.contains(str) ) {
@@ -56,7 +56,7 @@ public class TwitterStatusWordExtractor extends BaseOperator
   public void setup(OperatorContext context)
   {
     this.filterList = new HashSet<String>(Arrays.asList(new String[]{"", " ","I","you","the","a","to","as","he","him","his","her","she","me","can","for","of","and","or","but",
-           "this","that","!",",",".",":","#","/","@","be","in","out","was","were","is","am","are","so","no","...","my","de","RT","on","que","la","i","your","it","have","with","?","when",
-    "up","just","do","at","&","-","+","*","\\","y","n","like","se","en","te","el","I'm"}));
+      "this","that","!",",",".",":","#","/","@","be","in","out","was","were","is","am","are","so","no","...","my","de","RT","on","que","la","i","your","it","have","with","?","when",
+      "up","just","do","at","&","-","+","*","\\","y","n","like","se","en","te","el","I'm"}));
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterTopCounterApplication.java
----------------------------------------------------------------------
diff --git a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterTopCounterApplication.java b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterTopCounterApplication.java
index c8d3b00..731a38f 100644
--- a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterTopCounterApplication.java
+++ b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterTopCounterApplication.java
@@ -19,10 +19,14 @@
 package com.datatorrent.demos.twitter;
 
 import java.net.URI;
+import java.util.List;
+import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 
+import com.google.common.collect.Maps;
+
 import com.datatorrent.api.Context;
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.DAG.Locality;
@@ -35,15 +39,10 @@ import com.datatorrent.contrib.twitter.TwitterSampleInput;
 import com.datatorrent.lib.algo.UniqueCounter;
 import com.datatorrent.lib.appdata.schemas.SchemaUtils;
 import com.datatorrent.lib.appdata.snapshot.AppDataSnapshotServerMap;
+import com.datatorrent.lib.io.ConsoleOutputOperator;
 import com.datatorrent.lib.io.PubSubWebSocketAppDataQuery;
 import com.datatorrent.lib.io.PubSubWebSocketAppDataResult;
-import com.datatorrent.lib.io.ConsoleOutputOperator;
-import com.datatorrent.lib.io.PubSubWebSocketOutputOperator;
 
-import com.google.common.collect.Maps;
-
-import java.util.List;
-import java.util.Map;
 /**
  * Twitter Demo Application: <br>
  * This demo application samples random public status from twitter, send to url
@@ -147,7 +146,7 @@ import java.util.Map;
  *
  * @since 0.3.2
  */
-@ApplicationAnnotation(name=TwitterTopCounterApplication.APP_NAME)
+@ApplicationAnnotation(name = TwitterTopCounterApplication.APP_NAME)
 public class TwitterTopCounterApplication implements StreamingApplication
 {
   public static final String SNAPSHOT_SCHEMA = "twitterURLDataSchema.json";
@@ -188,11 +187,7 @@ public class TwitterTopCounterApplication implements StreamingApplication
     consoleOutput(dag, "topURLs", topCounts.output, SNAPSHOT_SCHEMA, "url");
   }
 
-  public static void consoleOutput(DAG dag,
-                                   String operatorName,
-                                   OutputPort<List<Map<String, Object>>> topCount,
-                                   String schemaFile,
-                                   String alias)
+  public static void consoleOutput(DAG dag, String operatorName, OutputPort<List<Map<String, Object>>> topCount, String schemaFile, String alias)
   {
     String gatewayAddress = dag.getValue(DAG.GATEWAY_CONNECT_ADDRESS);
     if (!StringUtils.isEmpty(gatewayAddress)) {
@@ -217,8 +212,7 @@ public class TwitterTopCounterApplication implements StreamingApplication
 
       dag.addStream("MapProvider", topCount, snapshotServer.input);
       dag.addStream("Result", snapshotServer.queryResult, queryResultPort);
-    }
-    else {
+    } else {
       ConsoleOutputOperator operator = dag.addOperator(operatorName, new ConsoleOutputOperator());
       operator.setStringFormat(operatorName + ": %s");
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterTopWordsApplication.java
----------------------------------------------------------------------
diff --git a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterTopWordsApplication.java b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterTopWordsApplication.java
index 8ed3678..3953ab7 100644
--- a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterTopWordsApplication.java
+++ b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterTopWordsApplication.java
@@ -18,6 +18,7 @@
  */
 package com.datatorrent.demos.twitter;
 
+import org.apache.hadoop.conf.Configuration;
 
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.DAG.Locality;
@@ -25,9 +26,6 @@ import com.datatorrent.api.StreamingApplication;
 import com.datatorrent.api.annotation.ApplicationAnnotation;
 import com.datatorrent.contrib.twitter.TwitterSampleInput;
 import com.datatorrent.lib.algo.UniqueCounter;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-
 
 /**
  * This application is same as other twitter demo
@@ -43,7 +41,7 @@ import org.apache.hadoop.conf.Configuration;
  *
  * @since 0.3.2
  */
-@ApplicationAnnotation(name=TwitterTopWordsApplication.APP_NAME)
+@ApplicationAnnotation(name = TwitterTopWordsApplication.APP_NAME)
 public class TwitterTopWordsApplication implements StreamingApplication
 {
   public static final String SNAPSHOT_SCHEMA = "twitterWordDataSchema.json";

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterTrendingHashtagsApplication.java
----------------------------------------------------------------------
diff --git a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterTrendingHashtagsApplication.java b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterTrendingHashtagsApplication.java
index 5246060..3597a92 100644
--- a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterTrendingHashtagsApplication.java
+++ b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/TwitterTrendingHashtagsApplication.java
@@ -18,16 +18,13 @@
  */
 package com.datatorrent.demos.twitter;
 
+import org.apache.hadoop.conf.Configuration;
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.DAG.Locality;
 import com.datatorrent.api.StreamingApplication;
 import com.datatorrent.api.annotation.ApplicationAnnotation;
 import com.datatorrent.contrib.twitter.TwitterSampleInput;
 import com.datatorrent.lib.algo.UniqueCounter;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-
-
 
 /**
  * Twitter Demo Application: <br>
@@ -134,7 +131,7 @@ import org.apache.hadoop.conf.Configuration;
  *
  * @since 1.0.2
  */
-@ApplicationAnnotation(name=TwitterTrendingHashtagsApplication.APP_NAME)
+@ApplicationAnnotation(name = TwitterTrendingHashtagsApplication.APP_NAME)
 public class TwitterTrendingHashtagsApplication implements StreamingApplication
 {
   public static final String SNAPSHOT_SCHEMA = "twitterHashTagDataSchema.json";

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/twitter/src/main/java/com/datatorrent/demos/twitter/URLSerDe.java
----------------------------------------------------------------------
diff --git a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/URLSerDe.java b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/URLSerDe.java
index 7f6f399..43ed8f7 100644
--- a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/URLSerDe.java
+++ b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/URLSerDe.java
@@ -18,11 +18,11 @@
  */
 package com.datatorrent.demos.twitter;
 
-import com.datatorrent.api.StreamCodec;
-import com.datatorrent.netlet.util.Slice;
 import java.nio.ByteBuffer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import com.datatorrent.api.StreamCodec;
+import com.datatorrent.netlet.util.Slice;
 
 /**
  * <p>URLSerDe class.</p>
@@ -42,11 +42,9 @@ public class URLSerDe implements StreamCodec<byte[]>
   {
     if (fragment == null || fragment.buffer == null) {
       return null;
-    }
-    else if (fragment.offset == 0 && fragment.length == fragment.buffer.length) {
+    } else if (fragment.offset == 0 && fragment.length == fragment.buffer.length) {
       return fragment.buffer;
-    }
-    else {
+    } else {
       byte[] buffer = new byte[fragment.buffer.length];
       System.arraycopy(fragment.buffer, fragment.offset, buffer, 0, fragment.length);
       return buffer;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/twitter/src/main/java/com/datatorrent/demos/twitter/WindowedTopCounter.java
----------------------------------------------------------------------
diff --git a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/WindowedTopCounter.java b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/WindowedTopCounter.java
index 449c903..20bb673 100644
--- a/demos/twitter/src/main/java/com/datatorrent/demos/twitter/WindowedTopCounter.java
+++ b/demos/twitter/src/main/java/com/datatorrent/demos/twitter/WindowedTopCounter.java
@@ -18,23 +18,25 @@
  */
 package com.datatorrent.demos.twitter;
 
-import java.util.*;
+import java.util.Collections;
+import java.util.Comparator;
 import java.util.HashMap;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 import java.util.PriorityQueue;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.datatorrent.api.*;
-import com.datatorrent.api.Context.OperatorContext;
-
-import com.datatorrent.common.util.BaseOperator;
-
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
+
 /**
  *
  * WindowedTopCounter is an operator which counts the most often occurring tuples in a sliding window of a specific size.
@@ -114,8 +116,7 @@ public class WindowedTopCounter<T> extends BaseOperator
 
       if (holder.totalCount == 0) {
         iterator.remove();
-      }
-      else {
+      } else {
         topCounter.add(holder);
         if (--i == 0) {
           break;
@@ -138,8 +139,7 @@ public class WindowedTopCounter<T> extends BaseOperator
           topCounter.poll();
           topCounter.add(holder);
           smallest = topCounter.peek().totalCount;
-        }
-        else if (holder.totalCount == 0) {
+        } else if (holder.totalCount == 0) {
           iterator.remove();
         }
       }
@@ -149,7 +149,7 @@ public class WindowedTopCounter<T> extends BaseOperator
 
     Iterator<SlidingContainer<T>> topIter = topCounter.iterator();
 
-    while(topIter.hasNext()) {
+    while (topIter.hasNext()) {
       final SlidingContainer<T> wh = topIter.next();
       Map<String, Object> tableRow = Maps.newHashMap();
 
@@ -254,8 +254,7 @@ public class WindowedTopCounter<T> extends BaseOperator
     {
       if (o1.totalCount > o2.totalCount) {
         return 1;
-      }
-      else if (o1.totalCount < o2.totalCount) {
+      } else if (o1.totalCount < o2.totalCount) {
         return -1;
       }
 
@@ -274,8 +273,8 @@ public class WindowedTopCounter<T> extends BaseOperator
     @Override
     public int compare(Map<String, Object> o1, Map<String, Object> o2)
     {
-      Integer count1 = (Integer) o1.get(FIELD_COUNT);
-      Integer count2 = (Integer) o2.get(FIELD_COUNT);
+      Integer count1 = (Integer)o1.get(FIELD_COUNT);
+      Integer count2 = (Integer)o2.get(FIELD_COUNT);
 
       return count1.compareTo(count2);
     }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/twitter/src/test/java/com/datatorrent/demos/twitter/TwitterDumpApplicationTest.java
----------------------------------------------------------------------
diff --git a/demos/twitter/src/test/java/com/datatorrent/demos/twitter/TwitterDumpApplicationTest.java b/demos/twitter/src/test/java/com/datatorrent/demos/twitter/TwitterDumpApplicationTest.java
index a4daf09..cd211ff 100644
--- a/demos/twitter/src/test/java/com/datatorrent/demos/twitter/TwitterDumpApplicationTest.java
+++ b/demos/twitter/src/test/java/com/datatorrent/demos/twitter/TwitterDumpApplicationTest.java
@@ -19,10 +19,11 @@
 package com.datatorrent.demos.twitter;
 
 import org.junit.Test;
-import static org.junit.Assert.*;
 
 import org.apache.hadoop.conf.Configuration;
 
+import static org.junit.Assert.assertEquals;
+
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.LocalMode;
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/twitter/src/test/java/com/datatorrent/demos/twitter/TwitterTopCounterTest.java
----------------------------------------------------------------------
diff --git a/demos/twitter/src/test/java/com/datatorrent/demos/twitter/TwitterTopCounterTest.java b/demos/twitter/src/test/java/com/datatorrent/demos/twitter/TwitterTopCounterTest.java
index 0ad4d18..91a4e20 100644
--- a/demos/twitter/src/test/java/com/datatorrent/demos/twitter/TwitterTopCounterTest.java
+++ b/demos/twitter/src/test/java/com/datatorrent/demos/twitter/TwitterTopCounterTest.java
@@ -18,11 +18,10 @@
  */
 package com.datatorrent.demos.twitter;
 
+import org.junit.Test;
+import org.apache.hadoop.conf.Configuration;
 import com.datatorrent.api.LocalMode;
 import com.datatorrent.contrib.twitter.TwitterSampleInput;
-import com.datatorrent.demos.twitter.TwitterTopCounterApplication;
-import org.apache.hadoop.conf.Configuration;
-import org.junit.Test;
 
 /**
  * Test the DAG declaration in local mode.

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/twitter/src/test/java/com/datatorrent/demos/twitter/TwitterTopWordsTest.java
----------------------------------------------------------------------
diff --git a/demos/twitter/src/test/java/com/datatorrent/demos/twitter/TwitterTopWordsTest.java b/demos/twitter/src/test/java/com/datatorrent/demos/twitter/TwitterTopWordsTest.java
index a27c60f..4ac2e8d 100644
--- a/demos/twitter/src/test/java/com/datatorrent/demos/twitter/TwitterTopWordsTest.java
+++ b/demos/twitter/src/test/java/com/datatorrent/demos/twitter/TwitterTopWordsTest.java
@@ -18,12 +18,11 @@
  */
 package com.datatorrent.demos.twitter;
 
+import org.junit.Test;
+import org.apache.hadoop.conf.Configuration;
 import com.datatorrent.api.LocalMode;
 import com.datatorrent.contrib.twitter.TwitterSampleInput;
 
-import org.apache.hadoop.conf.Configuration;
-import org.junit.Test;
-
 /**
  * Test the DAG declaration in local mode.
  */
@@ -38,9 +37,9 @@ public class TwitterTopWordsTest
   @Test
   public void testApplication() throws Exception
   {
-	TwitterTopWordsApplication app = new TwitterTopWordsApplication();
-	Configuration conf =new Configuration(false);
-	conf.addResource("dt-site-rollingtopwords.xml");
+    TwitterTopWordsApplication app = new TwitterTopWordsApplication();
+    Configuration conf = new Configuration(false);
+    conf.addResource("dt-site-rollingtopwords.xml");
     LocalMode lma = LocalMode.newInstance();
     lma.prepareDAG(app, conf);
     LocalMode.Controller lc = lma.getController();

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/Application.java
----------------------------------------------------------------------
diff --git a/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/Application.java b/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/Application.java
index 1b27cea..57ef1a1 100644
--- a/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/Application.java
+++ b/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/Application.java
@@ -18,7 +18,6 @@
  */
 package com.datatorrent.demos.uniquecount;
 
-
 import org.apache.hadoop.conf.Configuration;
 
 import com.datatorrent.api.Context;
@@ -27,6 +26,7 @@ import com.datatorrent.api.DAG.Locality;
 import com.datatorrent.api.StreamingApplication;
 import com.datatorrent.api.annotation.ApplicationAnnotation;
 
+import com.datatorrent.common.partitioner.StatelessPartitioner;
 import com.datatorrent.lib.algo.UniqueCounter;
 import com.datatorrent.lib.converter.MapToKeyHashValuePairConverter;
 import com.datatorrent.lib.io.ConsoleOutputOperator;
@@ -34,8 +34,6 @@ import com.datatorrent.lib.stream.Counter;
 import com.datatorrent.lib.stream.StreamDuplicater;
 import com.datatorrent.lib.util.KeyHashValPair;
 
-import com.datatorrent.common.partitioner.StatelessPartitioner;
-
 /**
  * Application to demonstrate PartitionableUniqueCount operator. <br>
  * The input operator generate random keys, which is sent to
@@ -45,7 +43,7 @@ import com.datatorrent.common.partitioner.StatelessPartitioner;
  *
  * @since 1.0.2
  */
-@ApplicationAnnotation(name="UniqueValueCountDemo")
+@ApplicationAnnotation(name = "UniqueValueCountDemo")
 public class Application implements StreamingApplication
 {
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/CountVerifier.java
----------------------------------------------------------------------
diff --git a/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/CountVerifier.java b/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/CountVerifier.java
index 3a5140d..d201037 100644
--- a/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/CountVerifier.java
+++ b/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/CountVerifier.java
@@ -18,6 +18,9 @@
  */
 package com.datatorrent.demos.uniquecount;
 
+import java.util.HashMap;
+import java.util.Map;
+
 import com.datatorrent.api.Context;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
@@ -25,9 +28,6 @@ import com.datatorrent.api.Operator;
 import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
 import com.datatorrent.lib.util.KeyHashValPair;
 
-import java.util.HashMap;
-import java.util.Map;
-
 /*
 Compare results and print non-matching values to console.
  */
@@ -41,35 +41,33 @@ public class CountVerifier<K> implements Operator
   HashMap<K, Integer> map1 = new HashMap<K, Integer>();
   HashMap<K, Integer> map2 = new HashMap<K, Integer>();
 
-  public transient final DefaultInputPort<KeyHashValPair<K, Integer>> in1 =
-      new DefaultInputPort<KeyHashValPair<K, Integer>>()
-      {
-        @Override
-        public void process(KeyHashValPair<K, Integer> tuple)
-        {
-          processTuple(tuple, map1);
-        }
-      };
+  public final transient DefaultInputPort<KeyHashValPair<K, Integer>> in1 = new DefaultInputPort<KeyHashValPair<K, Integer>>()
+  {
+    @Override
+    public void process(KeyHashValPair<K, Integer> tuple)
+    {
+      processTuple(tuple, map1);
+    }
+  };
 
-  public transient final DefaultInputPort<KeyHashValPair<K, Integer>> in2 =
-      new DefaultInputPort<KeyHashValPair<K, Integer>>()
-      {
-        @Override
-        public void process(KeyHashValPair<K, Integer> tuple)
-        {
-          processTuple(tuple, map2);
-        }
-      };
+  public final transient DefaultInputPort<KeyHashValPair<K, Integer>> in2 = new DefaultInputPort<KeyHashValPair<K, Integer>>()
+  {
+    @Override
+    public void process(KeyHashValPair<K, Integer> tuple)
+    {
+      processTuple(tuple, map2);
+    }
+  };
 
   void processTuple(KeyHashValPair<K, Integer> tuple, HashMap<K, Integer> map)
   {
     map.put(tuple.getKey(), tuple.getValue());
   }
 
-  @OutputPortFieldAnnotation(optional=true)
-  public transient final DefaultOutputPort<Integer> successPort = new DefaultOutputPort<Integer>();
-  @OutputPortFieldAnnotation(optional=true)
-  public transient final DefaultOutputPort<Integer> failurePort = new DefaultOutputPort<Integer>();
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<Integer> successPort = new DefaultOutputPort<Integer>();
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<Integer> failurePort = new DefaultOutputPort<Integer>();
 
   @Override
   public void beginWindow(long l)

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/RandomDataGenerator.java
----------------------------------------------------------------------
diff --git a/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/RandomDataGenerator.java b/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/RandomDataGenerator.java
index 2742961..e806759 100644
--- a/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/RandomDataGenerator.java
+++ b/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/RandomDataGenerator.java
@@ -18,14 +18,17 @@
  */
 package com.datatorrent.demos.uniquecount;
 
+import java.util.HashMap;
+import java.util.Random;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import com.datatorrent.api.Context;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.InputOperator;
 import com.datatorrent.lib.util.KeyValPair;
 
-import java.util.HashMap;
-import java.util.Random;
-
 /**
  * Generate random Key value pairs.
  * key is string and value is int, it emits the pair as KeyValPair on outPort,
@@ -36,6 +39,7 @@ public class RandomDataGenerator implements InputOperator
 {
   public final transient DefaultOutputPort<KeyValPair<String, Object>> outPort = new DefaultOutputPort<KeyValPair<String, Object>>();
   private HashMap<String, Integer> dataInfo;
+  private final transient Logger LOG = LoggerFactory.getLogger(RandomDataGenerator.class);
   private int count;
   private int sleepMs = 10;
   private int keyRange = 100;
@@ -51,15 +55,15 @@ public class RandomDataGenerator implements InputOperator
   @Override
   public void emitTuples()
   {
-    for(int i = 0 ; i < tupleBlast; i++) {
+    for (int i = 0; i < tupleBlast; i++) {
       String key = String.valueOf(random.nextInt(keyRange));
       int val = random.nextInt(valueRange);
       outPort.emit(new KeyValPair<String, Object>(key, val));
     }
     try {
       Thread.sleep(sleepMs);
-    } catch(Exception ex) {
-      System.out.println(ex.getMessage());
+    } catch (Exception ex) {
+      LOG.error(ex.getMessage());
     }
     count++;
   }
@@ -93,7 +97,7 @@ public class RandomDataGenerator implements InputOperator
   @Override
   public void endWindow()
   {
-    System.out.println("emitTuples called  " + count + " times in this window");
+    LOG.debug("emitTuples called  " + count + " times in this window");
     count = 0;
   }
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/RandomKeyValues.java
----------------------------------------------------------------------
diff --git a/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/RandomKeyValues.java b/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/RandomKeyValues.java
index feeb282..28f3bc0 100644
--- a/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/RandomKeyValues.java
+++ b/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/RandomKeyValues.java
@@ -18,16 +18,16 @@
  */
 package com.datatorrent.demos.uniquecount;
 
-import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.InputOperator;
-import com.datatorrent.lib.util.KeyValPair;
-
 import java.util.BitSet;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.InputOperator;
+import com.datatorrent.lib.util.KeyValPair;
+
 /**
  * Input port operator for generating random values on keys. <br>
  * Key(s)   : key + integer in range between 0 and numKeys <br>
@@ -37,107 +37,117 @@ import java.util.Random;
  */
 public class RandomKeyValues implements InputOperator
 {
-	public final transient DefaultOutputPort<KeyValPair<String, Object>> outport = new DefaultOutputPort<KeyValPair<String, Object>>();
-	private Random random = new Random(11111);
-    private int numKeys;
-    private int numValuesPerKeys;
-    private int tuppleBlast = 1000;
-    private int emitDelay = 20; /* 20 ms */
-
-    /* For verification */
-    private Map<Integer, BitSet> history = new HashMap<Integer, BitSet>();
-
-    public RandomKeyValues() {
-        this.numKeys = 100;
-        this.numValuesPerKeys = 100;
-    }
-
-    public RandomKeyValues(int keys, int values) {
-        this.numKeys = keys;
-        this.numValuesPerKeys = values;
-    }
-
-    @Override
-	public void beginWindow(long windowId)
-	{
-	}
-
-	@Override
-	public void endWindow()
-	{
-	}
-
-	@Override
-	public void setup(OperatorContext context)
-	{
-	}
-
-	@Override
-	public void teardown()
-	{
-	}
-
-	@Override
-	public void emitTuples()
-	{
-        /* generate tuples randomly, */
-        for(int i = 0; i < tuppleBlast; i++) {
-            int intKey = random.nextInt(numKeys);
-            String key = "key" + String.valueOf(intKey);
-            int value = random.nextInt(numValuesPerKeys);
-
-            // update history for verifying later.
-            BitSet bmap = history.get(intKey);
-            if (bmap == null) {
-                bmap = new BitSet();
-                history.put(intKey, bmap);
-            }
-            bmap.set(value);
-
-            // emit the key with value.
-            outport.emit(new KeyValPair<String, Object>(key, value));
-        }
-		try
-		{
-			Thread.sleep(emitDelay);
-		} catch (Exception e)
-		{
-		}
-	}
-
-    public int getNumKeys() {
-        return numKeys;
-    }
-
-    public void setNumKeys(int numKeys) {
-        this.numKeys = numKeys;
-    }
-
-    public int getNumValuesPerKeys() {
-        return numValuesPerKeys;
-    }
-
-    public void setNumValuesPerKeys(int numValuesPerKeys) {
-        this.numValuesPerKeys = numValuesPerKeys;
-    }
-
-    public int getTuppleBlast() {
-        return tuppleBlast;
+  public final transient DefaultOutputPort<KeyValPair<String, Object>> outport = new DefaultOutputPort<KeyValPair<String, Object>>();
+  private Random random = new Random(11111);
+  private int numKeys;
+  private int numValuesPerKeys;
+  private int tuppleBlast = 1000;
+  private int emitDelay = 20; /* 20 ms */
+
+  /* For verification */
+  private Map<Integer, BitSet> history = new HashMap<Integer, BitSet>();
+
+  public RandomKeyValues()
+  {
+    this.numKeys = 100;
+    this.numValuesPerKeys = 100;
+  }
+
+  public RandomKeyValues(int keys, int values)
+  {
+    this.numKeys = keys;
+    this.numValuesPerKeys = values;
+  }
+
+  @Override
+  public void beginWindow(long windowId)
+  {
+  }
+
+  @Override
+  public void endWindow()
+  {
+  }
+
+  @Override
+  public void setup(OperatorContext context)
+  {
+  }
+
+  @Override
+  public void teardown()
+  {
+  }
+
+  @Override
+  public void emitTuples()
+  {
+    /* generate tuples randomly, */
+    for (int i = 0; i < tuppleBlast; i++) {
+      int intKey = random.nextInt(numKeys);
+      String key = "key" + String.valueOf(intKey);
+      int value = random.nextInt(numValuesPerKeys);
+
+      // update history for verifying later.
+      BitSet bmap = history.get(intKey);
+      if (bmap == null) {
+        bmap = new BitSet();
+        history.put(intKey, bmap);
+      }
+      bmap.set(value);
+
+      // emit the key with value.
+      outport.emit(new KeyValPair<String, Object>(key, value));
     }
-
-    public void setTuppleBlast(int tuppleBlast) {
-        this.tuppleBlast = tuppleBlast;
-    }
-
-    public int getEmitDelay() {
-        return emitDelay;
-    }
-
-    public void setEmitDelay(int emitDelay) {
-        this.emitDelay = emitDelay;
-    }
-
-    public void debug() {
-
+    try {
+      Thread.sleep(emitDelay);
+    } catch (Exception e) {
+      // Ignore.
     }
+  }
+
+  public int getNumKeys()
+  {
+    return numKeys;
+  }
+
+  public void setNumKeys(int numKeys)
+  {
+    this.numKeys = numKeys;
+  }
+
+  public int getNumValuesPerKeys()
+  {
+    return numValuesPerKeys;
+  }
+
+  public void setNumValuesPerKeys(int numValuesPerKeys)
+  {
+    this.numValuesPerKeys = numValuesPerKeys;
+  }
+
+  public int getTuppleBlast()
+  {
+    return tuppleBlast;
+  }
+
+  public void setTuppleBlast(int tuppleBlast)
+  {
+    this.tuppleBlast = tuppleBlast;
+  }
+
+  public int getEmitDelay()
+  {
+    return emitDelay;
+  }
+
+  public void setEmitDelay(int emitDelay)
+  {
+    this.emitDelay = emitDelay;
+  }
+
+  public void debug()
+  {
+
+  }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/RandomKeysGenerator.java
----------------------------------------------------------------------
diff --git a/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/RandomKeysGenerator.java b/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/RandomKeysGenerator.java
index 65b5c95..eb9d22c 100644
--- a/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/RandomKeysGenerator.java
+++ b/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/RandomKeysGenerator.java
@@ -18,14 +18,18 @@
  */
 package com.datatorrent.demos.uniquecount;
 
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.commons.lang3.mutable.MutableInt;
+
 import com.datatorrent.api.Context;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.InputOperator;
 import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
 import com.datatorrent.lib.util.KeyHashValPair;
-import org.apache.commons.lang3.mutable.MutableInt;
-
-import java.util.*;
 
 /*
     Generate random keys.
@@ -61,8 +65,7 @@ public class RandomKeysGenerator implements InputOperator
       outPort.emit(key);
 
 
-      if (verificationPort.isConnected())
-      {
+      if (verificationPort.isConnected()) {
         // maintain history for later verification.
         MutableInt count = history.get(key);
         if (count == null) {
@@ -74,10 +77,11 @@ public class RandomKeysGenerator implements InputOperator
 
     }
     try {
-      if (sleepTime != 0)
+      if (sleepTime != 0) {
         Thread.sleep(sleepTime);
+      }
     } catch (Exception ex) {
-
+      // Ignore.
     }
   }
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/UniqueKeyValCountDemo.java
----------------------------------------------------------------------
diff --git a/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/UniqueKeyValCountDemo.java b/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/UniqueKeyValCountDemo.java
index 95323d5..eb9e392 100644
--- a/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/UniqueKeyValCountDemo.java
+++ b/demos/uniquecount/src/main/java/com/datatorrent/demos/uniquecount/UniqueKeyValCountDemo.java
@@ -18,7 +18,6 @@
  */
 package com.datatorrent.demos.uniquecount;
 
-
 import org.apache.hadoop.conf.Configuration;
 
 import com.datatorrent.api.Context;
@@ -27,19 +26,19 @@ import com.datatorrent.api.DAG.Locality;
 import com.datatorrent.api.StreamingApplication;
 import com.datatorrent.api.annotation.ApplicationAnnotation;
 
+import com.datatorrent.common.partitioner.StatelessPartitioner;
+
 import com.datatorrent.lib.algo.UniqueCounter;
 import com.datatorrent.lib.converter.MapToKeyHashValuePairConverter;
 import com.datatorrent.lib.io.ConsoleOutputOperator;
 import com.datatorrent.lib.util.KeyValPair;
 
-import com.datatorrent.common.partitioner.StatelessPartitioner;
-
 /**
  * <p>UniqueKeyValCountDemo class.</p>
  *
  * @since 1.0.2
  */
-@ApplicationAnnotation(name="UniqueKeyValueCountDemo")
+@ApplicationAnnotation(name = "UniqueKeyValueCountDemo")
 public class UniqueKeyValCountDemo implements StreamingApplication
 {
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/uniquecount/src/test/java/com/datatorrent/demos/uniquecount/ApplicationTest.java
----------------------------------------------------------------------
diff --git a/demos/uniquecount/src/test/java/com/datatorrent/demos/uniquecount/ApplicationTest.java b/demos/uniquecount/src/test/java/com/datatorrent/demos/uniquecount/ApplicationTest.java
index 991a94d..66a0af1 100644
--- a/demos/uniquecount/src/test/java/com/datatorrent/demos/uniquecount/ApplicationTest.java
+++ b/demos/uniquecount/src/test/java/com/datatorrent/demos/uniquecount/ApplicationTest.java
@@ -18,9 +18,9 @@
  */
 package com.datatorrent.demos.uniquecount;
 
-import com.datatorrent.api.LocalMode;
-import org.apache.hadoop.conf.Configuration;
 import org.junit.Test;
+import org.apache.hadoop.conf.Configuration;
+import com.datatorrent.api.LocalMode;
 
 /**
  * Test the DAG declaration in local mode.

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/uniquecount/src/test/java/com/datatorrent/demos/uniquecount/UniqueKeyValDemoTest.java
----------------------------------------------------------------------
diff --git a/demos/uniquecount/src/test/java/com/datatorrent/demos/uniquecount/UniqueKeyValDemoTest.java b/demos/uniquecount/src/test/java/com/datatorrent/demos/uniquecount/UniqueKeyValDemoTest.java
index 01e790a..a198247 100644
--- a/demos/uniquecount/src/test/java/com/datatorrent/demos/uniquecount/UniqueKeyValDemoTest.java
+++ b/demos/uniquecount/src/test/java/com/datatorrent/demos/uniquecount/UniqueKeyValDemoTest.java
@@ -18,9 +18,9 @@
  */
 package com.datatorrent.demos.uniquecount;
 
-import com.datatorrent.api.LocalMode;
-import org.apache.hadoop.conf.Configuration;
 import org.junit.Test;
+import org.apache.hadoop.conf.Configuration;
+import com.datatorrent.api.LocalMode;
 
 /**
  * Test the DAG declaration in local mode.

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/Application.java
----------------------------------------------------------------------
diff --git a/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/Application.java b/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/Application.java
index 1028080..d0512cf 100644
--- a/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/Application.java
+++ b/demos/wordcount/src/main/java/com/datatorrent/demos/wordcount/Application.java
@@ -18,14 +18,14 @@
  */
 package com.datatorrent.demos.wordcount;
 
-import com.datatorrent.api.annotation.ApplicationAnnotation;
-import com.datatorrent.api.StreamingApplication;
+import org.apache.hadoop.conf.Configuration;
+
 import com.datatorrent.api.DAG;
+import com.datatorrent.api.StreamingApplication;
+import com.datatorrent.api.annotation.ApplicationAnnotation;
 import com.datatorrent.lib.algo.UniqueCounter;
 import com.datatorrent.lib.io.ConsoleOutputOperator;
 
-import org.apache.hadoop.conf.Configuration;
-
 /**
  * Simple Word Count Demo : <br>
  * This is application to count total occurrence of each word from file or any
@@ -72,8 +72,8 @@ import org.apache.hadoop.conf.Configuration;
  * Streaming Window Size : 500ms
  * Operator Details : <br>
  * <ul>
- * 	<li>
- *     <p><b> The operator wordinput : </b> This operator opens local file, reads each line and sends each word to application.
+ * <li>
+ * <p><b> The operator wordinput : </b> This operator opens local file, reads each line and sends each word to application.
  *         This can replaced by any input stream by user. <br>
  *     Class : {@link com.datatorrent.demos.wordcount.WordCountInputOperator}  <br>
  *     Operator Application Window Count : 1 <br>
@@ -93,10 +93,10 @@ import org.apache.hadoop.conf.Configuration;
  *
  * @since 0.3.2
  */
-@ApplicationAnnotation(name="WordCountDemo")
+@ApplicationAnnotation(name = "WordCountDemo")
 public class Application implements StreamingApplication
 {
-   @Override
+  @Override
   public void populateDAG(DAG dag, Configuration conf)
   {
     WordCountInputOperator input = dag.addOperator("wordinput", new WordCountInputOperator());
@@ -104,8 +104,5 @@ public class Application implements StreamingApplication
     dag.addStream("wordinput-count", input.outputPort, wordCount.data);
     ConsoleOutputOperator consoleOperator = dag.addOperator("console", new ConsoleOutputOperator());
     dag.addStream("count-console",wordCount.count, consoleOperator.input);
-
   }
-
-
 }


[4/6] apex-malhar git commit: Fixed checkstyle errors for demos.

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/WindowedWordCount.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/WindowedWordCount.java b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/WindowedWordCount.java
index 6a6777e..c8a0e51 100644
--- a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/WindowedWordCount.java
+++ b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/WindowedWordCount.java
@@ -57,7 +57,7 @@ import static org.apache.apex.malhar.stream.api.Option.Options.name;
 public class WindowedWordCount implements StreamingApplication
 {
   static final int WINDOW_SIZE = 1;  // Default window duration in minutes
-  
+
   /**
    * A input operator that reads from and output a file line by line to downstream with a time gap between
    * every two lines.
@@ -65,23 +65,23 @@ public class WindowedWordCount implements StreamingApplication
   public static class TextInput extends BaseOperator implements InputOperator
   {
     private static boolean done = false;
-    
+
     public final transient DefaultOutputPort<String> output = new DefaultOutputPort<>();
-    
+
     private transient BufferedReader reader;
-  
+
     public static boolean isDone()
     {
       return done;
     }
-  
+
     @Override
     public void setup(Context.OperatorContext context)
     {
       done = false;
       initReader();
     }
-    
+
     private void initReader()
     {
       try {
@@ -91,13 +91,13 @@ public class WindowedWordCount implements StreamingApplication
         throw Throwables.propagate(ex);
       }
     }
-    
+
     @Override
     public void teardown()
     {
       IOUtils.closeQuietly(reader);
     }
-    
+
     @Override
     public void emitTuples()
     {
@@ -118,16 +118,16 @@ public class WindowedWordCount implements StreamingApplication
       }
     }
   }
-  
+
   public static class Collector extends BaseOperator
   {
     private static Map<KeyValPair<Long, String>, Long> result = new HashMap<>();
-  
+
     public static Map<KeyValPair<Long, String>, Long> getResult()
     {
       return result;
     }
-  
+
     public final transient DefaultInputPort<PojoEvent> input = new DefaultInputPort<PojoEvent>()
     {
       @Override
@@ -137,7 +137,7 @@ public class WindowedWordCount implements StreamingApplication
       }
     };
   }
-  
+
   /**
    * A Pojo Tuple class used for outputting result to JDBC.
    */
@@ -146,44 +146,44 @@ public class WindowedWordCount implements StreamingApplication
     private String word;
     private long count;
     private long timestamp;
-  
+
     @Override
     public String toString()
     {
       return "PojoEvent (word=" + getWord() + ", count=" + getCount() + ", timestamp=" + getTimestamp() + ")";
     }
-  
+
     public String getWord()
     {
       return word;
     }
-  
+
     public void setWord(String word)
     {
       this.word = word;
     }
-  
+
     public long getCount()
     {
       return count;
     }
-  
+
     public void setCount(long count)
     {
       this.count = count;
     }
-  
+
     public long getTimestamp()
     {
       return timestamp;
     }
-  
+
     public void setTimestamp(long timestamp)
     {
       this.timestamp = timestamp;
     }
   }
-  
+
   /**
    * A map function that wrap the input string with a random generated timestamp.
    */
@@ -191,12 +191,12 @@ public class WindowedWordCount implements StreamingApplication
   {
     private static final Duration RAND_RANGE = Duration.standardMinutes(10);
     private final Long minTimestamp;
-    
+
     AddTimestampFn()
     {
       this.minTimestamp = System.currentTimeMillis();
     }
-    
+
     @Override
     public Tuple.TimestampedTuple<String> f(String input)
     {
@@ -207,7 +207,7 @@ public class WindowedWordCount implements StreamingApplication
       return new Tuple.TimestampedTuple<>(randomTimestamp, input);
     }
   }
-  
+
   /** A MapFunction that converts a Word and Count into a PojoEvent. */
   public static class FormatAsTableRowFn implements Function.MapFunction<Tuple.WindowedTuple<KeyValPair<String, Long>>, PojoEvent>
   {
@@ -221,7 +221,7 @@ public class WindowedWordCount implements StreamingApplication
       return row;
     }
   }
-  
+
   /**
    * Populate dag with High-Level API.
    * @param dag
@@ -232,10 +232,10 @@ public class WindowedWordCount implements StreamingApplication
   {
     TextInput input = new TextInput();
     Collector collector = new Collector();
-    
+
     // Create stream from the TextInput operator.
     ApexStream<Tuple.TimestampedTuple<String>> stream = StreamFactory.fromInput(input, input.output, name("input"))
-      
+
         // Extract all the words from the input line of text.
         .flatMap(new Function.FlatMapFunction<String, String>()
         {
@@ -245,18 +245,18 @@ public class WindowedWordCount implements StreamingApplication
             return Arrays.asList(input.split("[\\p{Punct}\\s]+"));
           }
         }, name("ExtractWords"))
-      
+
         // Wrap the word with a randomly generated timestamp.
         .map(new AddTimestampFn(), name("AddTimestampFn"));
-    
-   
+
+
     // apply window and trigger option.
     // TODO: change trigger option to atWaterMark when available.
     WindowedStream<Tuple.TimestampedTuple<String>> windowedWords = stream
         .window(new WindowOption.TimeWindows(Duration.standardMinutes(WINDOW_SIZE)),
         new TriggerOption().accumulatingFiredPanes().withEarlyFiringsAtEvery(1));
-    
-    
+
+
     WindowedStream<PojoEvent> wordCounts =
         // Perform a countByKey transformation to count the appearance of each word in every time window.
         windowedWords.countByKey(new Function.ToKeyValue<Tuple.TimestampedTuple<String>, String, Long>()
@@ -268,10 +268,10 @@ public class WindowedWordCount implements StreamingApplication
               new KeyValPair<String, Long>(input.getValue(), 1L));
           }
         }, name("count words"))
-          
+
         // Format the output and print out the result.
         .map(new FormatAsTableRowFn(), name("FormatAsTableRowFn")).print();
-    
+
     wordCounts.endWith(collector, collector.input, name("Collector")).populateDag(dag);
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/AutoComplete.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/AutoComplete.java b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/AutoComplete.java
index 29c8cf9..00c40e7 100644
--- a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/AutoComplete.java
+++ b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/AutoComplete.java
@@ -79,12 +79,12 @@ public class AutoComplete implements StreamingApplication
     public final transient DefaultOutputPort<String> output = new DefaultOutputPort<>();
 
     private transient BufferedReader reader;
-  
+
     public static boolean isDone()
     {
       return done;
     }
-  
+
     @Override
     public void setup(OperatorContext context)
     {
@@ -128,16 +128,16 @@ public class AutoComplete implements StreamingApplication
       }
     }
   }
-  
+
   public static class Collector extends BaseOperator
   {
     private static Map<String, List<CompletionCandidate>> result = new HashMap<>();
-  
+
     public static Map<String, List<CompletionCandidate>> getResult()
     {
       return result;
     }
-  
+
     public final transient DefaultInputPort<Tuple.WindowedTuple<KeyValPair<String, List<CompletionCandidate>>>> input = new DefaultInputPort<Tuple.WindowedTuple<KeyValPair<String, List<CompletionCandidate>>>>()
     {
       @Override
@@ -193,7 +193,7 @@ public class AutoComplete implements StreamingApplication
           @Override
           public Tuple<KeyValPair<String, CompletionCandidate>> f(KeyValPair<String, CompletionCandidate> tuple)
           {
-            // TODO: Should be removed after Auto-wrapping is supported. 
+            // TODO: Should be removed after Auto-wrapping is supported.
             return new Tuple.WindowedTuple<>(Window.GLOBAL_WINDOW, tuple);
           }
         });
@@ -271,7 +271,8 @@ public class AutoComplete implements StreamingApplication
             {
               return new Tuple.PlainTuple<>(new KeyValPair<>(input, 1L));
             }
-          }, name("countByKey")).map(new Function.MapFunction<Tuple.WindowedTuple<KeyValPair<String,Long>>, CompletionCandidate>()
+          }, name("countByKey"))
+          .map(new Function.MapFunction<Tuple.WindowedTuple<KeyValPair<String,Long>>, CompletionCandidate>()
           {
             @Override
             public CompletionCandidate f(Tuple.WindowedTuple<KeyValPair<String, Long>> input)
@@ -300,7 +301,7 @@ public class AutoComplete implements StreamingApplication
 
     ApexStream<String> tags = StreamFactory.fromInput(input, input.output, name("tweetSampler"))
         .flatMap(new ExtractHashtags());
-    
+
     tags.window(windowOption, new TriggerOption().accumulatingFiredPanes().withEarlyFiringsAtEvery(1))
         .addCompositeStreams(ComputeTopCompletions.top(10, true)).endWith(collector, collector.input, name("collector"))
         .populateDag(dag);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/CompletionCandidate.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/CompletionCandidate.java b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/CompletionCandidate.java
index 8a7113e..5531b5e 100644
--- a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/CompletionCandidate.java
+++ b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/CompletionCandidate.java
@@ -45,7 +45,7 @@ public class CompletionCandidate implements Comparable<CompletionCandidate>
   // Empty constructor required for Kryo.
   public CompletionCandidate()
   {
-    
+
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/PojoEvent.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/PojoEvent.java b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/PojoEvent.java
index 2a4c003..e7eb90c 100644
--- a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/PojoEvent.java
+++ b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/PojoEvent.java
@@ -24,18 +24,18 @@ package org.apache.apex.malhar.stream.sample.complete;
 public class PojoEvent extends Object
 {
   private String stringValue;
-  
+
   @Override
   public String toString()
   {
     return "PojoEvent [stringValue=" + getStringValue() + "]";
   }
-  
+
   public void setStringValue(String newString)
   {
     this.stringValue = newString;
   }
-  
+
   public String getStringValue()
   {
     return this.stringValue;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/StreamingWordExtract.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/StreamingWordExtract.java b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/StreamingWordExtract.java
index 2ffdc82..845901a 100644
--- a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/StreamingWordExtract.java
+++ b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/StreamingWordExtract.java
@@ -45,17 +45,17 @@ public class StreamingWordExtract implements StreamingApplication
 {
   private static int wordCount = 0; // A counter to count number of words have been extracted.
   private static int entriesMapped = 0; // A counter to count number of entries have been mapped.
-  
+
   public int getWordCount()
   {
     return wordCount;
   }
-  
+
   public int getEntriesMapped()
   {
     return entriesMapped;
   }
-  
+
   /**
    * A MapFunction that tokenizes lines of text into individual words.
    */
@@ -69,8 +69,8 @@ public class StreamingWordExtract implements StreamingApplication
       return result;
     }
   }
-  
-  
+
+
   /**
    * A MapFunction that uppercases a word.
    */
@@ -82,8 +82,8 @@ public class StreamingWordExtract implements StreamingApplication
       return input.toUpperCase();
     }
   }
-  
-  
+
+
   /**
    * A filter function to filter out empty strings.
    */
@@ -95,14 +95,14 @@ public class StreamingWordExtract implements StreamingApplication
       return !input.isEmpty();
     }
   }
-  
-  
+
+
   /**
    * A map function to map the result string to a pojo entry.
    */
   public static class PojoMapper implements Function.MapFunction<String, Object>
   {
-  
+
     @Override
     public Object f(String input)
     {
@@ -112,7 +112,7 @@ public class StreamingWordExtract implements StreamingApplication
       return pojo;
     }
   }
-  
+
   /**
    * Add field infos to the {@link JdbcPOJOInsertOutputOperator}.
    */
@@ -122,7 +122,7 @@ public class StreamingWordExtract implements StreamingApplication
     fieldInfos.add(new JdbcFieldInfo("STRINGVALUE", "stringValue", JdbcFieldInfo.SupportType.STRING, VARCHAR));
     return fieldInfos;
   }
-  
+
   /**
    * Populate dag with High-Level API.
    * @param dag
@@ -136,25 +136,25 @@ public class StreamingWordExtract implements StreamingApplication
     JdbcTransactionalStore outputStore = new JdbcTransactionalStore();
     jdbcOutput.setStore(outputStore);
     jdbcOutput.setTablename("TestTable");
-    
+
     // Create a stream reading from a folder.
     ApexStream<String> stream = StreamFactory.fromFolder("./src/test/resources/data");
 
     // Extract all the words from the input line of text.
     stream.flatMap(new ExtractWords())
-      
+
         // Filter out the empty strings.
         .filter(new EmptyStringFilter())
-      
+
         // Change every word to uppercase.
         .map(new Uppercase())
-      
+
         // Map the resulted word to a Pojo entry.
         .map(new PojoMapper())
-      
+
         // Output the entries to JdbcOutput and insert them into a table.
         .endWith(jdbcOutput, jdbcOutput.input, Option.Options.name("jdbcOutput"));
-    
+
     stream.populateDag(dag);
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/TopWikipediaSessions.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/TopWikipediaSessions.java b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/TopWikipediaSessions.java
index f2e70b1..d7d62fe 100644
--- a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/TopWikipediaSessions.java
+++ b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/TopWikipediaSessions.java
@@ -63,23 +63,23 @@ public class TopWikipediaSessions implements StreamingApplication
   {
     private String[] names = new String[]{"user1", "user2", "user3", "user4"};
     public transient DefaultOutputPort<KeyValPair<String, Long>> output = new DefaultOutputPort<>();
-  
+
     private static final Duration RAND_RANGE = Duration.standardDays(365);
     private Long minTimestamp;
     private long sleepTime;
     private static int tupleCount = 0;
-  
+
     public static int getTupleCount()
     {
       return tupleCount;
     }
-  
+
     private String randomName(String[] names)
     {
       int index = new Random().nextInt(names.length);
       return names[index];
     }
-  
+
     @Override
     public void setup(Context.OperatorContext context)
     {
@@ -88,7 +88,7 @@ public class TopWikipediaSessions implements StreamingApplication
       minTimestamp = System.currentTimeMillis();
       sleepTime = context.getValue(Context.OperatorContext.SPIN_MILLIS);
     }
-  
+
     @Override
     public void emitTuples()
     {
@@ -103,17 +103,17 @@ public class TopWikipediaSessions implements StreamingApplication
       }
     }
   }
-  
+
   public static class Collector extends BaseOperator
   {
     private final int resultSize = 5;
     private static List<List<TempWrapper>> result = new ArrayList<>();
-  
+
     public static List<List<TempWrapper>> getResult()
     {
       return result;
     }
-  
+
     public final transient DefaultInputPort<Tuple.WindowedTuple<List<TempWrapper>>> input = new DefaultInputPort<Tuple.WindowedTuple<List<TempWrapper>>>()
     {
       @Override
@@ -126,8 +126,8 @@ public class TopWikipediaSessions implements StreamingApplication
       }
     };
   }
-  
-  
+
+
   /**
    * Convert the upstream (user, time) combination to a timestamped tuple of user.
    */
@@ -138,13 +138,13 @@ public class TopWikipediaSessions implements StreamingApplication
     {
       long timestamp = input.getValue();
       String userName = input.getKey();
-   
+
       // Sets the implicit timestamp field to be used in windowing.
       return new Tuple.TimestampedTuple<>(timestamp, userName);
-      
+
     }
   }
-  
+
   /**
    * Computes the number of edits in each user session.  A session is defined as
    * a string of edits where each is separated from the next by less than an hour.
@@ -156,10 +156,10 @@ public class TopWikipediaSessions implements StreamingApplication
     public WindowedStream<Tuple.WindowedTuple<KeyValPair<String, Long>>> compose(ApexStream<Tuple.TimestampedTuple<String>> inputStream)
     {
       return inputStream
-        
+
         // Chuck the stream into session windows.
         .window(new WindowOption.SessionWindows(Duration.standardHours(1)), new TriggerOption().accumulatingFiredPanes().withEarlyFiringsAtEvery(1))
-        
+
         // Count the number of edits for a user within one session.
         .countByKey(new Function.ToKeyValue<Tuple.TimestampedTuple<String>, String, Long>()
         {
@@ -171,7 +171,7 @@ public class TopWikipediaSessions implements StreamingApplication
         }, name("ComputeSessions"));
     }
   }
-  
+
   /**
    * A comparator class used for comparing two TempWrapper objects.
    */
@@ -183,7 +183,7 @@ public class TopWikipediaSessions implements StreamingApplication
       return Long.compare(o1.getValue().getValue(), o2.getValue().getValue());
     }
   }
-  
+
   /**
    * A function to extract timestamp from a TempWrapper object.
    */
@@ -196,7 +196,7 @@ public class TopWikipediaSessions implements StreamingApplication
       return input.getTimestamp();
     }
   }
-  
+
   /**
    * A temporary wrapper to wrap a KeyValPair and a timestamp together to represent a timestamped tuple, the reason
    * for this is that we cannot resolve a type conflict when calling accumulate(). After the issue resolved, we can
@@ -206,39 +206,39 @@ public class TopWikipediaSessions implements StreamingApplication
   {
     private KeyValPair<String, Long> value;
     private Long timestamp;
-    
+
     public TempWrapper()
     {
-      
+
     }
-    
+
     public TempWrapper(KeyValPair<String, Long> value, Long timestamp)
     {
       this.value = value;
       this.timestamp = timestamp;
     }
-  
+
     @Override
     public String toString()
     {
       return this.value + "  -  " + this.timestamp;
     }
-  
+
     public Long getTimestamp()
     {
       return timestamp;
     }
-  
+
     public void setTimestamp(Long timestamp)
     {
       this.timestamp = timestamp;
     }
-  
+
     public KeyValPair<String, Long> getValue()
     {
       return value;
     }
-  
+
     public void setValue(KeyValPair<String, Long> value)
     {
       this.value = value;
@@ -251,16 +251,16 @@ public class TopWikipediaSessions implements StreamingApplication
   private static class TopPerMonth
       extends CompositeStreamTransform<ApexStream<Tuple.WindowedTuple<KeyValPair<String, Long>>>, WindowedStream<Tuple.WindowedTuple<List<TempWrapper>>>>
   {
-    
+
     @Override
     public WindowedStream<Tuple.WindowedTuple<List<TempWrapper>>> compose(ApexStream<Tuple.WindowedTuple<KeyValPair<String, Long>>> inputStream)
     {
       TopN<TempWrapper> topN = new TopN<>();
       topN.setN(10);
       topN.setComparator(new Comp());
-      
+
       return inputStream
-        
+
         // Map the input WindowedTuple to a TempWrapper object.
         .map(new Function.MapFunction<Tuple.WindowedTuple<KeyValPair<String, Long>>, TempWrapper>()
         {
@@ -270,15 +270,15 @@ public class TopWikipediaSessions implements StreamingApplication
             return new TempWrapper(input.getValue(), input.getWindows().get(0).getBeginTimestamp());
           }
         }, name("TempWrapper"))
-        
+
         // Apply window and trigger option again, this time chuck the stream into fixed time windows.
         .window(new WindowOption.TimeWindows(Duration.standardDays(30)), new TriggerOption().accumulatingFiredPanes().withEarlyFiringsAtEvery(Duration.standardSeconds(5)))
-        
+
         // Compute the top 10 user-sessions with most number of edits.
         .accumulate(topN, name("TopN")).with("timestampExtractor", new TimestampExtractor());
     }
   }
-  
+
   /**
    * A map function that combine the user and his/her edit session together to a string and use that string as a key
    * with number of edits in that session as value to create a new key value pair to send to downstream.
@@ -293,7 +293,7 @@ public class TopWikipediaSessions implements StreamingApplication
         input.getValue().getValue()));
     }
   }
-  
+
   /**
    * A flapmap function that turns the result into readable format.
    */
@@ -311,7 +311,7 @@ public class TopWikipediaSessions implements StreamingApplication
       return result;
     }
   }
-  
+
   /**
    * A composite transform that compute the top wikipedia sessions.
    */
@@ -327,7 +327,7 @@ public class TopWikipediaSessions implements StreamingApplication
         .addCompositeStreams(new TopPerMonth());
     }
   }
-  
+
   @Override
   public void populateDAG(DAG dag, Configuration conf)
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/TrafficRoutes.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/TrafficRoutes.java b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/TrafficRoutes.java
index 26a2823..3045238 100644
--- a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/TrafficRoutes.java
+++ b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/TrafficRoutes.java
@@ -63,7 +63,7 @@ public class TrafficRoutes implements StreamingApplication
   static Map<String, String> sdStations = buildStationInfo();
   static final int WINDOW_DURATION = 3;  // Default sliding window duration in minutes
   static final int WINDOW_SLIDE_EVERY = 1;  // Default window 'slide every' setting in minutes
-  
+
   /**
    * This class holds information about a station reading's average speed.
    */
@@ -75,54 +75,54 @@ public class TrafficRoutes implements StreamingApplication
     Double avgSpeed;
     @Nullable
     Long timestamp;
-    
+
     public StationSpeed() {}
-    
+
     public StationSpeed(String stationId, Double avgSpeed, Long timestamp)
     {
       this.stationId = stationId;
       this.avgSpeed = avgSpeed;
       this.timestamp = timestamp;
     }
-  
+
     public void setAvgSpeed(@Nullable Double avgSpeed)
     {
       this.avgSpeed = avgSpeed;
     }
-  
+
     public void setStationId(@Nullable String stationId)
     {
       this.stationId = stationId;
     }
-  
+
     public void setTimestamp(@Nullable Long timestamp)
     {
       this.timestamp = timestamp;
     }
-  
+
     @Nullable
     public Long getTimestamp()
     {
       return timestamp;
     }
-  
+
     public String getStationId()
     {
       return this.stationId;
     }
-    
+
     public Double getAvgSpeed()
     {
       return this.avgSpeed;
     }
-    
+
     @Override
     public int compareTo(StationSpeed other)
     {
       return Long.compare(this.timestamp, other.timestamp);
     }
   }
-  
+
   /**
    * This class holds information about a route's speed/slowdown.
    */
@@ -134,63 +134,63 @@ public class TrafficRoutes implements StreamingApplication
     Double avgSpeed;
     @Nullable
     Boolean slowdownEvent;
-    
+
     public RouteInfo()
     {
-      
+
     }
-    
+
     public RouteInfo(String route, Double avgSpeed, Boolean slowdownEvent)
     {
       this.route = route;
       this.avgSpeed = avgSpeed;
       this.slowdownEvent = slowdownEvent;
     }
-    
+
     public String getRoute()
     {
       return this.route;
     }
-    
+
     public Double getAvgSpeed()
     {
       return this.avgSpeed;
     }
-    
+
     public Boolean getSlowdownEvent()
     {
       return this.slowdownEvent;
     }
   }
-  
+
   /**
    * Extract the timestamp field from the input string, and wrap the input string in a {@link Tuple.TimestampedTuple}
    * with the extracted timestamp.
    */
   static class ExtractTimestamps implements Function.MapFunction<String, Tuple.TimestampedTuple<String>>
   {
-    
+
     @Override
     public Tuple.TimestampedTuple<String> f(String input)
     {
       String[] items = input.split(",");
       String timestamp = tryParseTimestamp(items);
-    
+
       return new Tuple.TimestampedTuple<>(Long.parseLong(timestamp), input);
     }
   }
-  
+
   /**
    * Filter out readings for the stations along predefined 'routes', and output
    * (station, speed info) keyed on route.
    */
   static class ExtractStationSpeedFn implements Function.FlatMapFunction<Tuple.TimestampedTuple<String>, KeyValPair<String, StationSpeed>>
   {
-    
+
     @Override
     public Iterable<KeyValPair<String, StationSpeed>> f(Tuple.TimestampedTuple<String> input)
     {
-      
+
       ArrayList<KeyValPair<String, StationSpeed>> result = new ArrayList<>();
       String[] items = input.getValue().split(",");
       String stationType = tryParseStationType(items);
@@ -210,7 +210,7 @@ public class TrafficRoutes implements StreamingApplication
       return result;
     }
   }
-  
+
   /**
    * For a given route, track average speed for the window. Calculate whether
    * traffic is currently slowing down, via a predefined threshold. If a supermajority of
@@ -261,7 +261,7 @@ public class TrafficRoutes implements StreamingApplication
       return result;
     }
   }
-  
+
   /**
    * Output Pojo class for outputting result to JDBC.
    */
@@ -271,11 +271,11 @@ public class TrafficRoutes implements StreamingApplication
     private Boolean slowdownEvent;
     private String key;
     private Long timestamp;
-    
+
     public OutputPojo()
     {
     }
- 
+
     public OutputPojo(Double avgSpeed, Boolean slowdownEvent, String key, Long timestamp)
     {
       this.avgSpeed = avgSpeed;
@@ -283,64 +283,64 @@ public class TrafficRoutes implements StreamingApplication
       this.key = key;
       this.timestamp = timestamp;
     }
-  
+
     @Override
     public String toString()
     {
       return key + " + " + avgSpeed + " + " + slowdownEvent + " + " + timestamp;
     }
-  
+
     public void setTimestamp(Long timestamp)
     {
       this.timestamp = timestamp;
     }
-  
+
     public Long getTimestamp()
     {
       return timestamp;
     }
-  
+
     public void setAvgSpeed(Double avgSpeed)
     {
       this.avgSpeed = avgSpeed;
     }
-  
+
     public Double getAvgSpeed()
     {
       return avgSpeed;
     }
-  
+
     public void setKey(String key)
     {
       this.key = key;
     }
-  
+
     public String getKey()
     {
       return key;
     }
-  
+
     public void setSlowdownEvent(Boolean slowdownEvent)
     {
       this.slowdownEvent = slowdownEvent;
     }
-  
+
     public Boolean getSlowdownEvent()
     {
       return slowdownEvent;
     }
-    
+
   }
-  
+
   public static class Collector extends BaseOperator
   {
     private static Map<KeyValPair<Long, String>, KeyValPair<Double, Boolean>> result = new HashMap<>();
-  
+
     public static Map<KeyValPair<Long, String>, KeyValPair<Double, Boolean>> getResult()
     {
       return result;
     }
-    
+
     public final transient DefaultInputPort<OutputPojo> input = new DefaultInputPort<OutputPojo>()
     {
       @Override
@@ -350,7 +350,7 @@ public class TrafficRoutes implements StreamingApplication
       }
     };
   }
-  
+
   /**
    * Format the results of the slowdown calculations to a OutputPojo.
    */
@@ -364,8 +364,8 @@ public class TrafficRoutes implements StreamingApplication
       return row;
     }
   }
-    
-  
+
+
   /**
    * This composite transformation extracts speed info from traffic station readings.
    * It groups the readings by 'route' and analyzes traffic slowdown for that route.
@@ -389,19 +389,19 @@ public class TrafficRoutes implements StreamingApplication
               return new Tuple.TimestampedTuple<>(input.getValue().getTimestamp(), input);
             }
           }, name("GroupByKey"));
-      
+
       // Analyze 'slowdown' over the route readings.
       WindowedStream<Tuple.TimestampedTuple<KeyValPair<String, RouteInfo>>> stats = timeGroup
           .flatMap(new GatherStats(), name("GatherStats"));
-      
+
       // Format the results for writing to JDBC table.
       WindowedStream<OutputPojo> results = stats.map(new FormatStatsFn(), name("FormatStatsFn"));
-      
+
       return results;
     }
   }
-  
-  
+
+
   private static Double tryParseAvgSpeed(String[] inputItems)
   {
     try {
@@ -412,27 +412,27 @@ public class TrafficRoutes implements StreamingApplication
       return null;
     }
   }
-  
+
   private static String tryParseStationType(String[] inputItems)
   {
     return tryParseString(inputItems, 2);
   }
-  
+
   private static String tryParseStationId(String[] inputItems)
   {
     return tryParseString(inputItems, 1);
   }
-  
+
   private static String tryParseTimestamp(String[] inputItems)
   {
     return tryParseString(inputItems, 0);
   }
-  
+
   private static String tryParseString(String[] inputItems, int index)
   {
     return inputItems.length >= index ? inputItems[index] : null;
   }
-  
+
   /**
    * Define some small hard-wired San Diego 'routes' to track based on sensor station ID.
    */
@@ -444,33 +444,33 @@ public class TrafficRoutes implements StreamingApplication
     stations.put("1108702", "SDRoute2");
     return stations;
   }
-  
+
   /**
    * A dummy generator to generate some traffic information.
    */
   public static class InfoGen extends BaseOperator implements InputOperator
   {
     public transient DefaultOutputPort<String> output = new DefaultOutputPort<>();
-    
+
     private String[] stationTypes = new String[]{"ML", "BL", "GL"};
     private int[] stationIDs = new int[]{1108413, 1108699, 1108702};
     private double ave = 55.0;
     private long timestamp;
     private static final Duration RAND_RANGE = Duration.standardMinutes(10);
     private static int tupleCount = 0;
-  
+
     public static int getTupleCount()
     {
       return tupleCount;
     }
-  
+
     @Override
     public void setup(Context.OperatorContext context)
     {
       tupleCount = 0;
       timestamp = System.currentTimeMillis();
     }
-  
+
     @Override
     public void emitTuples()
     {
@@ -481,7 +481,7 @@ public class TrafficRoutes implements StreamingApplication
           try {
             output.emit(time + "," + stationID + "," + stationType + "," + speed);
             tupleCount++;
-         
+
             Thread.sleep(50);
           } catch (Exception e) {
             // Ignore it
@@ -490,29 +490,29 @@ public class TrafficRoutes implements StreamingApplication
       }
     }
   }
-  
+
   @Override
   public void populateDAG(DAG dag, Configuration conf)
   {
     InfoGen infoGen = new InfoGen();
     Collector collector = new Collector();
-    
+
     // Create a stream from the input operator.
     ApexStream<Tuple.TimestampedTuple<String>> stream = StreamFactory.fromInput(infoGen, infoGen.output, name("infoGen"))
-        
+
         // Extract the timestamp from the input and wrap it into a TimestampedTuple.
         .map(new ExtractTimestamps(), name("ExtractTimestamps"));
-    
+
     stream
         // Extract the average speed of a station.
         .flatMap(new ExtractStationSpeedFn(), name("ExtractStationSpeedFn"))
-      
+
         // Apply window and trigger option.
         .window(new WindowOption.SlidingTimeWindows(Duration.standardMinutes(WINDOW_DURATION), Duration.standardMinutes(WINDOW_SLIDE_EVERY)), new TriggerOption().withEarlyFiringsAtEvery(Duration.millis(5000)).accumulatingFiredPanes())
-        
+
         // Apply TrackSpeed composite transformation to compute the route information.
         .addCompositeStreams(new TrackSpeed())
-      
+
         // print the result to console.
         .print()
         .endWith(collector, collector.input, name("Collector"))

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/TwitterAutoComplete.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/TwitterAutoComplete.java b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/TwitterAutoComplete.java
index ecad622..a4fdf24 100644
--- a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/TwitterAutoComplete.java
+++ b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/complete/TwitterAutoComplete.java
@@ -200,7 +200,8 @@ public class TwitterAutoComplete implements StreamingApplication
             {
               return new Tuple.PlainTuple<>(new KeyValPair<>(input, 1L));
             }
-          }, name("Hashtag Count")).map(new Function.MapFunction<Tuple.WindowedTuple<KeyValPair<String,Long>>, CompletionCandidate>()
+          }, name("Hashtag Count"))
+          .map(new Function.MapFunction<Tuple.WindowedTuple<KeyValPair<String,Long>>, CompletionCandidate>()
           {
             @Override
             public CompletionCandidate f(Tuple.WindowedTuple<KeyValPair<String, Long>> input)

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/CombinePerKeyExamples.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/CombinePerKeyExamples.java b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/CombinePerKeyExamples.java
index d88a8dc..7c16521 100644
--- a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/CombinePerKeyExamples.java
+++ b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/CombinePerKeyExamples.java
@@ -94,7 +94,7 @@ public class CombinePerKeyExamples implements StreamingApplication
       return new SampleBean(input.getValue().getKey(), input.getValue().getValue());
     }
   }
-  
+
   /**
    * A reduce function to concat two strings together.
    */
@@ -106,7 +106,7 @@ public class CombinePerKeyExamples implements StreamingApplication
       return input1 + ", " + input2;
     }
   }
-  
+
   /**
    * Reads the public 'Shakespeare' data, and for each word in the dataset
    * over a given length, generates a string containing the list of play names
@@ -114,17 +114,17 @@ public class CombinePerKeyExamples implements StreamingApplication
    */
   private static class PlaysForWord extends CompositeStreamTransform<ApexStream<SampleBean>, WindowedStream<SampleBean>>
   {
-    
+
     @Override
     public WindowedStream<SampleBean> compose(ApexStream<SampleBean> inputStream)
     {
       return inputStream
           // Extract words from the input SampleBeam stream.
           .map(new ExtractLargeWordsFn(), name("ExtractLargeWordsFn"))
-          
+
           // Apply window and trigger option to the streams.
           .window(new WindowOption.GlobalWindow(), new TriggerOption().accumulatingFiredPanes().withEarlyFiringsAtEvery(1))
-        
+
           // Apply reduceByKey transformation to concat the names of all the plays that a word has appeared in together.
           .reduceByKey(new Concat(), new Function.ToKeyValue<KeyValPair<String,String>, String, String>()
           {
@@ -134,13 +134,13 @@ public class CombinePerKeyExamples implements StreamingApplication
               return new Tuple.PlainTuple<KeyValPair<String, String>>(input);
             }
           }, name("Concat"))
-        
+
           // Format the output back to a SampleBeam object.
           .map(new FormatShakespeareOutputFn(), name("FormatShakespeareOutputFn"));
     }
   }
-  
-  
+
+
   /**
    * A Java Beam class that contains information about a word appears in a corpus written by Shakespeare.
    */
@@ -157,13 +157,13 @@ public class CombinePerKeyExamples implements StreamingApplication
       this.word = word;
       this.corpus = corpus;
     }
-  
+
     @Override
     public String toString()
     {
       return this.word + " : "  + this.corpus;
     }
-  
+
     private String word;
 
     private String corpus;
@@ -188,7 +188,7 @@ public class CombinePerKeyExamples implements StreamingApplication
       return corpus;
     }
   }
-  
+
   /**
    * A dummy info generator to generate {@link SampleBean} objects to mimic reading from real 'Shakespeare'
    * data.
@@ -200,19 +200,19 @@ public class CombinePerKeyExamples implements StreamingApplication
     private String[] words = new String[]{"A", "B", "C", "D", "E", "F", "G"};
     private String[] corpuses = new String[]{"1", "2", "3", "4", "5", "6", "7", "8"};
     private static int i;
-  
+
     public static int getI()
     {
       return i;
     }
-  
+
     @Override
     public void setup(Context.OperatorContext context)
     {
       super.setup(context);
       i = 0;
     }
-  
+
     @Override
     public void emitTuples()
     {
@@ -229,20 +229,20 @@ public class CombinePerKeyExamples implements StreamingApplication
         }
         i++;
       }
-    
+
     }
   }
-  
+
   public static class Collector extends BaseOperator
   {
     static List<SampleBean> result;
-  
+
     @Override
     public void setup(Context.OperatorContext context)
     {
       result = new ArrayList<>();
     }
-  
+
     public final transient DefaultInputPort<SampleBean> input = new DefaultInputPort<SampleBean>()
     {
       @Override
@@ -252,7 +252,7 @@ public class CombinePerKeyExamples implements StreamingApplication
       }
     };
   }
-  
+
   /**
    * Populate dag using High-Level API.
    * @param dag
@@ -268,6 +268,6 @@ public class CombinePerKeyExamples implements StreamingApplication
       .print()
       .endWith(collector, collector.input, name("Collector"))
       .populateDag(dag);
-    
+
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/DeDupExample.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/DeDupExample.java b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/DeDupExample.java
index 2930010..0cd7c58 100644
--- a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/DeDupExample.java
+++ b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/DeDupExample.java
@@ -47,22 +47,22 @@ import static org.apache.apex.malhar.stream.api.Option.Options.name;
 @ApplicationAnnotation(name = "DeDupExample")
 public class DeDupExample implements StreamingApplication
 {
-  
+
   public static class Collector extends BaseOperator
   {
     private static Tuple.WindowedTuple<List<String>> result;
     private static boolean done = false;
-  
+
     public static Tuple.WindowedTuple<List<String>> getResult()
     {
       return result;
     }
-  
+
     public static boolean isDone()
     {
       return done;
     }
-  
+
     @Override
     public void setup(Context.OperatorContext context)
     {
@@ -70,7 +70,7 @@ public class DeDupExample implements StreamingApplication
       result = new Tuple.WindowedTuple<>();
       done = false;
     }
-  
+
     public transient DefaultInputPort<Tuple.WindowedTuple<List<String>>> input = new DefaultInputPort<Tuple.WindowedTuple<List<String>>>()
     {
       @Override
@@ -83,15 +83,15 @@ public class DeDupExample implements StreamingApplication
       }
     };
   }
-    
+
   @Override
   public void populateDAG(DAG dag, Configuration conf)
   {
     Collector collector = new Collector();
-    
+
     // Create a stream that reads from files in a local folder and output lines one by one to downstream.
     ApexStream<String> stream = StreamFactory.fromFolder("./src/test/resources/wordcount", name("textInput"))
-      
+
         // Extract all the words from the input line of text.
         .flatMap(new Function.FlatMapFunction<String, String>()
         {
@@ -101,7 +101,7 @@ public class DeDupExample implements StreamingApplication
             return Arrays.asList(input.split("[\\p{Punct}\\s]+"));
           }
         }, name("ExtractWords"))
-      
+
         // Change the words to lower case, also shutdown the app when the word "bye" is detected.
         .map(new Function.MapFunction<String, String>()
         {
@@ -111,14 +111,14 @@ public class DeDupExample implements StreamingApplication
             return input.toLowerCase();
           }
         }, name("ToLowerCase"));
-    
+
     // Apply window and trigger option.
     stream.window(new WindowOption.GlobalWindow(),
         new TriggerOption().accumulatingFiredPanes().withEarlyFiringsAtEvery(Duration.standardSeconds(1)))
-        
+
         // Remove the duplicate words and print out the result.
         .accumulate(new RemoveDuplicates<String>(), name("RemoveDuplicates")).print().endWith(collector, collector.input)
-    
+
         .populateDag(dag);
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/InputPojo.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/InputPojo.java b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/InputPojo.java
index 3643eab..1ba2a90 100644
--- a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/InputPojo.java
+++ b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/InputPojo.java
@@ -27,48 +27,48 @@ public class InputPojo extends Object
   private int day;
   private int year;
   private double meanTemp;
-  
+
   @Override
   public String toString()
   {
     return "PojoEvent [month=" + getMonth() + ", day=" + getDay() + ", year=" + getYear() + ", meanTemp=" + getMeanTemp() + "]";
   }
-  
+
   public void setMonth(int month)
   {
     this.month = month;
   }
-  
+
   public int getMonth()
   {
     return this.month;
   }
-  
+
   public void setDay(int day)
   {
     this.day = day;
   }
-  
+
   public int getDay()
   {
     return day;
   }
-  
+
   public void setYear(int year)
   {
     this.year = year;
   }
-  
+
   public int getYear()
   {
     return year;
   }
-  
+
   public void setMeanTemp(double meanTemp)
   {
     this.meanTemp = meanTemp;
   }
-  
+
   public double getMeanTemp()
   {
     return meanTemp;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/MaxPerKeyExamples.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/MaxPerKeyExamples.java b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/MaxPerKeyExamples.java
index 02980e4..4538aef 100644
--- a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/MaxPerKeyExamples.java
+++ b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/MaxPerKeyExamples.java
@@ -55,7 +55,7 @@ import static org.apache.apex.malhar.stream.api.Option.Options.name;
 @ApplicationAnnotation(name = "MaxPerKeyExamples")
 public class MaxPerKeyExamples implements StreamingApplication
 {
-  
+
   /**
    *  A map function to extract the mean temperature from {@link InputPojo}.
    */
@@ -69,8 +69,8 @@ public class MaxPerKeyExamples implements StreamingApplication
       return new KeyValPair<Integer, Double>(month, meanTemp);
     }
   }
-  
-  
+
+
   /**
    * A map function to format output to {@link OutputPojo}.
    */
@@ -85,7 +85,7 @@ public class MaxPerKeyExamples implements StreamingApplication
       return row;
     }
   }
-  
+
   /**
    * A composite transformation to perform three tasks:
    * 1. extract the month and its mean temperature from input pojo.
@@ -99,7 +99,7 @@ public class MaxPerKeyExamples implements StreamingApplication
     {
       // InputPojo... => <month, meanTemp> ...
       WindowedStream<KeyValPair<Integer, Double>> temps = rows.map(new ExtractTempFn(), name("ExtractTempFn"));
-      
+
       // month, meanTemp... => <month, max mean temp>...
       WindowedStream<Tuple.WindowedTuple<KeyValPair<Integer, Double>>> tempMaxes =
           temps.accumulateByKey(new Max<Double>(),
@@ -111,14 +111,14 @@ public class MaxPerKeyExamples implements StreamingApplication
                 return new Tuple.WindowedTuple<KeyValPair<Integer, Double>>(Window.GLOBAL_WINDOW, input);
               }
             }, name("MaxPerMonth"));
-      
+
       // <month, max>... => OutputPojo...
       WindowedStream<OutputPojo> results = tempMaxes.map(new FormatMaxesFn(), name("FormatMaxesFn"));
-      
+
       return results;
     }
   }
-  
+
   /**
    * Method to set field info for {@link JdbcPOJOInputOperator}.
    * @return
@@ -132,7 +132,7 @@ public class MaxPerKeyExamples implements StreamingApplication
     fieldInfos.add(new FieldInfo("MEANTEMP", "meanTemp", FieldInfo.SupportType.DOUBLE));
     return fieldInfos;
   }
-  
+
   /**
    * Method to set field info for {@link JdbcPOJOInsertOutputOperator}.
    * @return
@@ -144,8 +144,8 @@ public class MaxPerKeyExamples implements StreamingApplication
     fieldInfos.add(new JdbcFieldInfo("MEANTEMP", "meanTemp", JdbcFieldInfo.SupportType.DOUBLE, DOUBLE));
     return fieldInfos;
   }
-  
-  
+
+
   /**
    * Populate the dag using High-Level API.
    * @param dag
@@ -156,21 +156,21 @@ public class MaxPerKeyExamples implements StreamingApplication
   {
     JdbcPOJOInputOperator jdbcInput = new JdbcPOJOInputOperator();
     jdbcInput.setFieldInfos(addInputFieldInfos());
-  
+
     JdbcStore store = new JdbcStore();
     jdbcInput.setStore(store);
-  
+
     JdbcPOJOInsertOutputOperator jdbcOutput = new JdbcPOJOInsertOutputOperator();
     jdbcOutput.setFieldInfos(addOutputFieldInfos());
     JdbcTransactionalStore outputStore = new JdbcTransactionalStore();
     jdbcOutput.setStore(outputStore);
-    
+
     // Create stream that reads from a Jdbc Input.
     ApexStream<Object> stream = StreamFactory.fromInput(jdbcInput, jdbcInput.outputPort, name("jdbcInput"))
-      
+
         // Apply window and trigger option to the stream.
         .window(new WindowOption.GlobalWindow(), new TriggerOption().accumulatingFiredPanes().withEarlyFiringsAtEvery(1))
-      
+
         // Because Jdbc Input sends out a stream of Object, need to cast them to InputPojo.
         .map(new Function.MapFunction<Object, InputPojo>()
         {
@@ -180,10 +180,10 @@ public class MaxPerKeyExamples implements StreamingApplication
             return (InputPojo)input;
           }
         }, name("ObjectToInputPojo"))
-      
+
         // Plug in the composite transformation to the stream to calculate the maximum temperature for each month.
         .addCompositeStreams(new MaxMeanTemp())
-      
+
         // Cast the resulted OutputPojo to Object for Jdbc Output to consume.
         .map(new Function.MapFunction<OutputPojo, Object>()
         {
@@ -193,11 +193,11 @@ public class MaxPerKeyExamples implements StreamingApplication
             return (Object)input;
           }
         }, name("OutputPojoToObject"))
-      
+
         // Output the result to Jdbc Output.
         .endWith(jdbcOutput, jdbcOutput.input, name("jdbcOutput"));
-    
+
     stream.populateDag(dag);
-  
+
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/OutputPojo.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/OutputPojo.java b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/OutputPojo.java
index db2a09e..59831b7 100644
--- a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/OutputPojo.java
+++ b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/OutputPojo.java
@@ -25,28 +25,28 @@ public class OutputPojo
 {
   private int month;
   private double meanTemp;
-  
+
   @Override
   public String toString()
   {
     return "PojoEvent [month=" + getMonth() + ", meanTemp=" + getMeanTemp() + "]";
   }
-  
+
   public void setMonth(int month)
   {
     this.month = month;
   }
-  
+
   public int getMonth()
   {
     return this.month;
   }
-  
+
   public void setMeanTemp(double meanTemp)
   {
     this.meanTemp = meanTemp;
   }
-  
+
   public double getMeanTemp()
   {
     return meanTemp;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/TriggerExample.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/TriggerExample.java b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/TriggerExample.java
index bf23e3a..dd09352 100644
--- a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/TriggerExample.java
+++ b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/cookbook/TriggerExample.java
@@ -189,7 +189,7 @@ public class TriggerExample
       // At 11:03:00 (processing time) the system watermark may have advanced to 10:54:00. As a
       // result, when the data record with event time 10:05:00 arrives at 11:03:00, it is considered
       // late, and dropped.
-  
+
       WindowedStream<SampleBean> defaultTriggerResults = inputStream
           .window(new WindowOption.TimeWindows(Duration.standardMinutes(windowDuration)),
           new TriggerOption().discardingFiredPanes())
@@ -306,7 +306,7 @@ public class TriggerExample
     @Override
     public WindowedStream<SampleBean> compose(WindowedStream<String> inputStream)
     {
-  
+
       WindowedStream<KeyValPair<String, Iterable<Integer>>> flowPerFreeway = inputStream
           .groupByKey(new ExtractFlowInfo());
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/MinimalWordCountTest.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/MinimalWordCountTest.java b/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/MinimalWordCountTest.java
index 101953f..d32da72 100644
--- a/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/MinimalWordCountTest.java
+++ b/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/MinimalWordCountTest.java
@@ -37,11 +37,11 @@ public class MinimalWordCountTest
   {
     LocalMode lma = LocalMode.newInstance();
     Configuration conf = new Configuration(false);
-  
+
     MinimalWordCount app = new MinimalWordCount();
 
     lma.prepareDAG(app, conf);
- 
+
     LocalMode.Controller lc = lma.getController();
     ((StramLocalCluster)lc).setExitCondition(new Callable<Boolean>()
     {
@@ -51,9 +51,9 @@ public class MinimalWordCountTest
         return MinimalWordCount.Collector.isDone();
       }
     });
-    
+
     lc.run(10000);
-  
+
     Assert.assertTrue(MinimalWordCount.Collector.result.get("error") == 7);
     Assert.assertTrue(MinimalWordCount.Collector.result.get("word") == 119);
     Assert.assertTrue(MinimalWordCount.Collector.result.get("bye") == 1);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/WindowedWordCountTest.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/WindowedWordCountTest.java b/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/WindowedWordCountTest.java
index 952356f..f6270d4 100644
--- a/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/WindowedWordCountTest.java
+++ b/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/WindowedWordCountTest.java
@@ -56,16 +56,16 @@ public class WindowedWordCountTest
         return WindowedWordCount.TextInput.isDone();
       }
     });
-    
+
     lc.run(60000);
-    
+
     Assert.assertEquals(127, countSum(WindowedWordCount.Collector.getResult()));
     Assert.assertEquals(28, countSumWord(WindowedWordCount.Collector.getResult(), "word2"));
     Assert.assertEquals(7, countSumWord(WindowedWordCount.Collector.getResult(), "error"));
     Assert.assertEquals(21, countSumWord(WindowedWordCount.Collector.getResult(), "word9"));
     Assert.assertEquals(1, countSumWord(WindowedWordCount.Collector.getResult(), "bye"));
   }
-  
+
   public long countSum(Map<KeyValPair<Long, String>, Long> map)
   {
     long sum = 0;
@@ -74,7 +74,7 @@ public class WindowedWordCountTest
     }
     return sum;
   }
-  
+
   public long countSumWord(Map<KeyValPair<Long, String>, Long> map, String word)
   {
     long sum = 0;
@@ -85,6 +85,6 @@ public class WindowedWordCountTest
     }
     return sum;
   }
-  
+
 }
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/complete/AutoCompleteTest.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/complete/AutoCompleteTest.java b/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/complete/AutoCompleteTest.java
index dc236f9..26bb13e 100644
--- a/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/complete/AutoCompleteTest.java
+++ b/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/complete/AutoCompleteTest.java
@@ -41,7 +41,7 @@ public class AutoCompleteTest
     Configuration conf = new Configuration(false);
     lma.prepareDAG(new AutoComplete(), conf);
     LocalMode.Controller lc = lma.getController();
-    
+
     ((StramLocalCluster)lc).setExitCondition(new Callable<Boolean>()
     {
       @Override
@@ -50,9 +50,9 @@ public class AutoCompleteTest
         return AutoComplete.TweetsInput.isDone();
       }
     });
-    
+
     lc.run(200000);
-  
+
     Assert.assertTrue(AutoComplete.Collector.getResult().containsKey("chi"));
     Assert.assertTrue(AutoComplete.Collector.getResult().containsKey("china"));
     Assert.assertEquals(2, AutoComplete.Collector.getResult().get("china").get(0).getCount());
@@ -61,6 +61,6 @@ public class AutoCompleteTest
     Assert.assertEquals(3, AutoComplete.Collector.getResult().get("f").size());
     Assert.assertTrue(AutoComplete.Collector.getResult().get("f").get(0).getCount() >= AutoComplete.Collector.getResult().get("f").get(1).getCount());
     Assert.assertTrue(AutoComplete.Collector.getResult().get("f").get(1).getCount() >= AutoComplete.Collector.getResult().get("f").get(2).getCount());
-  
+
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/complete/StreamingWordExtractTest.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/complete/StreamingWordExtractTest.java b/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/complete/StreamingWordExtractTest.java
index bf9b030..dc9cdec 100644
--- a/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/complete/StreamingWordExtractTest.java
+++ b/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/complete/StreamingWordExtractTest.java
@@ -54,10 +54,10 @@ public class StreamingWordExtractTest
   {
     try {
       Class.forName(DB_DRIVER).newInstance();
-      
+
       Connection con = DriverManager.getConnection(DB_URL,USER_NAME,PSW);
       Statement stmt = con.createStatement();
-      
+
       String createMetaTable = "CREATE TABLE IF NOT EXISTS " + JdbcTransactionalStore.DEFAULT_META_TABLE + " ( "
           + JdbcTransactionalStore.DEFAULT_APP_ID_COL + " VARCHAR(100) NOT NULL, "
           + JdbcTransactionalStore.DEFAULT_OPERATOR_ID_COL + " INT NOT NULL, "
@@ -66,16 +66,16 @@ public class StreamingWordExtractTest
           + JdbcTransactionalStore.DEFAULT_OPERATOR_ID_COL + ", " + JdbcTransactionalStore.DEFAULT_WINDOW_COL + ") "
           + ")";
       stmt.executeUpdate(createMetaTable);
-      
+
       String createTable = "CREATE TABLE IF NOT EXISTS " + TABLE_NAME
           + "(STRINGVALUE VARCHAR(255))";
       stmt.executeUpdate(createTable);
-      
+
     } catch (Throwable e) {
       throw Throwables.propagate(e);
     }
   }
-  
+
   @After
   public void cleanTable()
   {
@@ -88,7 +88,7 @@ public class StreamingWordExtractTest
       throw new RuntimeException(e);
     }
   }
-  
+
   public void setConfig(Configuration conf)
   {
     conf.set("dt.operator.jdbcOutput.prop.store.userName", USER_NAME);
@@ -99,14 +99,14 @@ public class StreamingWordExtractTest
     conf.set("dt.operator.jdbcOutput.prop.store.databaseUrl", DB_URL);
     conf.set("dt.operator.jdbcOutput.prop.tablename", TABLE_NAME);
   }
-  
+
   public int getNumOfEventsInStore()
   {
     Connection con;
     try {
       con = DriverManager.getConnection(DB_URL,USER_NAME,PSW);
       Statement stmt = con.createStatement();
-      
+
       String countQuery = "SELECT count(*) from " + TABLE_NAME;
       ResultSet resultSet = stmt.executeQuery(countQuery);
       resultSet.next();
@@ -115,7 +115,7 @@ public class StreamingWordExtractTest
       throw new RuntimeException("fetching count", e);
     }
   }
-  
+
   @Test
   public void StreamingWordExtractTest() throws Exception
   {
@@ -125,7 +125,7 @@ public class StreamingWordExtractTest
     StreamingWordExtract app = new StreamingWordExtract();
     lma.prepareDAG(app, conf);
     LocalMode.Controller lc = lma.getController();
-    
+
     ((StramLocalCluster)lc).setExitCondition(new Callable<Boolean>()
     {
       @Override
@@ -134,11 +134,11 @@ public class StreamingWordExtractTest
         return getNumOfEventsInStore() == 36;
       }
     });
-    
+
     lc.run(10000);
-  
+
     Assert.assertEquals(app.getWordCount(), getNumOfEventsInStore());
     Assert.assertEquals(app.getEntriesMapped(), getNumOfEventsInStore());
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/complete/TopWikipediaSessionsTest.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/complete/TopWikipediaSessionsTest.java b/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/complete/TopWikipediaSessionsTest.java
index f8ec086..c0dbaf4 100644
--- a/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/complete/TopWikipediaSessionsTest.java
+++ b/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/complete/TopWikipediaSessionsTest.java
@@ -41,7 +41,7 @@ public class TopWikipediaSessionsTest
     Configuration conf = new Configuration(false);
     lma.prepareDAG(new TopWikipediaSessions(), conf);
     LocalMode.Controller lc = lma.getController();
-    
+
     ((StramLocalCluster)lc).setExitCondition(new Callable<Boolean>()
     {
       @Override
@@ -50,14 +50,14 @@ public class TopWikipediaSessionsTest
         return TopWikipediaSessions.SessionGen.getTupleCount() >= 250;
       }
     });
-    
+
     lc.run(30000);
-    
+
     for (int i = 0; i < TopWikipediaSessions.Collector.getResult().size(); i++) {
       Assert.assertTrue(isInOrder(TopWikipediaSessions.Collector.getResult().get(i)));
     }
   }
-  
+
   public boolean isInOrder(List<TopWikipediaSessions.TempWrapper> input)
   {
     if (input.size() == 0 || input.size() == 1) {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/complete/TrafficRoutesTest.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/complete/TrafficRoutesTest.java b/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/complete/TrafficRoutesTest.java
index e363ca7..c532898 100644
--- a/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/complete/TrafficRoutesTest.java
+++ b/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/complete/TrafficRoutesTest.java
@@ -43,7 +43,7 @@ public class TrafficRoutesTest
     Configuration conf = new Configuration(false);
     lma.prepareDAG(new TrafficRoutes(), conf);
     LocalMode.Controller lc = lma.getController();
-    
+
     ((StramLocalCluster)lc).setExitCondition(new Callable<Boolean>()
     {
       @Override
@@ -52,9 +52,9 @@ public class TrafficRoutesTest
         return TrafficRoutes.InfoGen.getTupleCount() >= 100;
       }
     });
-    
+
     lc.run(60000);
-    
+
     Assert.assertTrue(!TrafficRoutes.Collector.getResult().isEmpty());
     for (Map.Entry<KeyValPair<Long, String>, KeyValPair<Double, Boolean>> entry : TrafficRoutes.Collector.getResult().entrySet()) {
       Assert.assertTrue(entry.getValue().getKey() <= 75);
@@ -62,5 +62,5 @@ public class TrafficRoutesTest
       Assert.assertTrue(entry.getKey().getValue().equals("SDRoute1") || entry.getKey().getValue().equals("SDRoute2"));
     }
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/cookbook/CombinePerKeyExamplesTest.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/cookbook/CombinePerKeyExamplesTest.java b/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/cookbook/CombinePerKeyExamplesTest.java
index 5858013..b130808 100644
--- a/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/cookbook/CombinePerKeyExamplesTest.java
+++ b/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/cookbook/CombinePerKeyExamplesTest.java
@@ -35,11 +35,11 @@ public class CombinePerKeyExamplesTest
   {
     LocalMode lma = LocalMode.newInstance();
     Configuration conf = new Configuration(false);
-  
+
     CombinePerKeyExamples app = new CombinePerKeyExamples();
-      
+
     lma.prepareDAG(app, conf);
-    
+
     LocalMode.Controller lc = lma.getController();
     ((StramLocalCluster)lc).setExitCondition(new Callable<Boolean>()
     {
@@ -50,7 +50,7 @@ public class CombinePerKeyExamplesTest
       }
     });
     lc.run(100000);
-  
+
     Assert.assertTrue(CombinePerKeyExamples.Collector.result.get(CombinePerKeyExamples.Collector.result.size() - 1).getCorpus().contains("1, 2, 3, 4, 5, 6, 7, 8"));
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/cookbook/DeDupExampleTest.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/cookbook/DeDupExampleTest.java b/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/cookbook/DeDupExampleTest.java
index ed4ddb4..a175cd7 100644
--- a/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/cookbook/DeDupExampleTest.java
+++ b/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/cookbook/DeDupExampleTest.java
@@ -38,7 +38,7 @@ public class DeDupExampleTest
   {
     LocalMode lma = LocalMode.newInstance();
     Configuration conf = new Configuration(false);
-    
+
     DeDupExample app = new DeDupExample();
     lma.prepareDAG(app, conf);
     LocalMode.Controller lc = lma.getController();
@@ -51,9 +51,9 @@ public class DeDupExampleTest
       }
     });
     lc.run(50000);
-  
+
     Assert.assertEquals(9, DeDupExample.Collector.getResult().getValue().size());
-    
+
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/cookbook/MaxPerKeyExamplesTest.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/cookbook/MaxPerKeyExamplesTest.java b/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/cookbook/MaxPerKeyExamplesTest.java
index 51981de..ec28b40 100644
--- a/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/cookbook/MaxPerKeyExamplesTest.java
+++ b/demos/highlevelapi/src/test/java/org/apache/apex/malhar/stream/sample/cookbook/MaxPerKeyExamplesTest.java
@@ -46,7 +46,7 @@ import com.datatorrent.stram.StramLocalCluster;
  */
 public class MaxPerKeyExamplesTest
 {
-  
+
   private static final String INPUT_TUPLE_CLASS = "org.apache.apex.malhar.stream.sample.cookbook.InputPojo";
   private static final String OUTPUT_TUPLE_CLASS = "org.apache.apex.malhar.stream.sample.cookbook.OutputPojo";
   private static final String DB_DRIVER = "org.h2.Driver";
@@ -56,18 +56,18 @@ public class MaxPerKeyExamplesTest
   private static final String USER_NAME = "root";
   private static final String PSW = "password";
   private static final String QUERY = "SELECT * FROM " + INPUT_TABLE + ";";
-  
+
   private static final double[] MEANTEMPS = {85.3, 75.4};
-  
+
   @BeforeClass
   public static void setup()
   {
     try {
       Class.forName(DB_DRIVER).newInstance();
-      
+
       Connection con = DriverManager.getConnection(DB_URL,USER_NAME,PSW);
       Statement stmt = con.createStatement();
-      
+
       String createMetaTable = "CREATE TABLE IF NOT EXISTS " + JdbcTransactionalStore.DEFAULT_META_TABLE + " ( "
           + JdbcTransactionalStore.DEFAULT_APP_ID_COL + " VARCHAR(100) NOT NULL, "
           + JdbcTransactionalStore.DEFAULT_OPERATOR_ID_COL + " INT NOT NULL, "
@@ -76,53 +76,53 @@ public class MaxPerKeyExamplesTest
           + JdbcTransactionalStore.DEFAULT_OPERATOR_ID_COL + ", " + JdbcTransactionalStore.DEFAULT_WINDOW_COL + ") "
           + ")";
       stmt.executeUpdate(createMetaTable);
-      
+
       String createInputTable = "CREATE TABLE IF NOT EXISTS " + INPUT_TABLE
           + "(MONTH INT(2) not NULL, DAY INT(2), YEAR INT(4), MEANTEMP DOUBLE(10) )";
       stmt.executeUpdate(createInputTable);
-  
+
       String createOutputTable = "CREATE TABLE IF NOT EXISTS " + OUTPUT_TABLE
           + "(MONTH INT(2) not NULL, MEANTEMP DOUBLE(10) )";
       stmt.executeUpdate(createOutputTable);
-      
+
       String cleanTable = "truncate table " + INPUT_TABLE;
       stmt.executeUpdate(cleanTable);
-  
+
       stmt = con.createStatement();
-  
+
       String sql = "INSERT INTO " + INPUT_TABLE + " VALUES (6, 21, 2014, 85.3)";
       stmt.executeUpdate(sql);
       sql = "INSERT INTO " + INPUT_TABLE + " VALUES (7, 20, 2014, 75.4)";
       stmt.executeUpdate(sql);
       sql = "INSERT INTO " + INPUT_TABLE + " VALUES (6, 18, 2014, 45.3)";
       stmt.executeUpdate(sql);
-      
+
     } catch (Throwable e) {
       throw Throwables.propagate(e);
     }
   }
-  
+
   @AfterClass
   public static void cleanup()
   {
     try {
       Class.forName(DB_DRIVER).newInstance();
-  
+
       Connection con = DriverManager.getConnection(DB_URL, USER_NAME, PSW);
       Statement stmt = con.createStatement();
-  
+
       String dropInputTable = "DROP TABLE " + INPUT_TABLE;
       stmt.executeUpdate(dropInputTable);
-  
+
       String dropOutputTable = "DROP TABLE " + OUTPUT_TABLE;
       stmt.executeUpdate(dropOutputTable);
-      
+
     } catch (Throwable e) {
       throw Throwables.propagate(e);
     }
-    
+
   }
-  
+
   public void setConfig(Configuration conf)
   {
     conf.set("dt.operator.jdbcInput.prop.store.userName", USER_NAME);
@@ -133,7 +133,7 @@ public class MaxPerKeyExamplesTest
     conf.set("dt.operator.jdbcInput.prop.store.databaseUrl", DB_URL);
     conf.set("dt.operator.jdbcInput.prop.tableName", INPUT_TABLE);
     conf.set("dt.operator.jdbcInput.prop.query", QUERY);
-  
+
     conf.set("dt.operator.jdbcOutput.prop.store.userName", USER_NAME);
     conf.set("dt.operator.jdbcOutput.prop.store.password", PSW);
     conf.set("dt.operator.jdbcOutput.prop.store.databaseDriver", DB_DRIVER);
@@ -142,14 +142,14 @@ public class MaxPerKeyExamplesTest
     conf.set("dt.operator.jdbcOutput.prop.store.databaseUrl", DB_URL);
     conf.set("dt.operator.jdbcOutput.prop.tablename", OUTPUT_TABLE);
   }
-  
+
   public int getNumEntries()
   {
     Connection con;
     try {
       con = DriverManager.getConnection(DB_URL,USER_NAME,PSW);
       Statement stmt = con.createStatement();
-    
+
       String countQuery = "SELECT count(DISTINCT (MONTH, MEANTEMP)) from " + OUTPUT_TABLE;
       ResultSet resultSet = stmt.executeQuery(countQuery);
       resultSet.next();
@@ -158,7 +158,7 @@ public class MaxPerKeyExamplesTest
       throw new RuntimeException("fetching count", e);
     }
   }
-  
+
   public Map<Integer, Double> getMaxMeanTemp()
   {
     Map<Integer, Double> result = new HashMap<>();
@@ -166,30 +166,30 @@ public class MaxPerKeyExamplesTest
     try {
       con = DriverManager.getConnection(DB_URL,USER_NAME,PSW);
       Statement stmt = con.createStatement();
-    
+
       String countQuery = "SELECT DISTINCT * from " + OUTPUT_TABLE;
       ResultSet resultSet = stmt.executeQuery(countQuery);
       while (resultSet.next()) {
         result.put(resultSet.getInt("MONTH"), resultSet.getDouble("MEANTEMP"));
-        
+
       }
       return result;
     } catch (SQLException e) {
       throw new RuntimeException("fetching count", e);
     }
   }
-  
+
   @Test
   public void MaxPerKeyExampleTest() throws Exception
   {
     LocalMode lma = LocalMode.newInstance();
     Configuration conf = new Configuration(false);
     setConfig(conf);
-    
+
     MaxPerKeyExamples app = new MaxPerKeyExamples();
-  
+
     lma.prepareDAG(app, conf);
-  
+
     LocalMode.Controller lc = lma.getController();
     ((StramLocalCluster)lc).setExitCondition(new Callable<Boolean>()
     {
@@ -199,9 +199,9 @@ public class MaxPerKeyExamplesTest
         return getNumEntries() == 2;
       }
     });
-    
+
     lc.run(5000);
-    
+
     double[] result = new double[2];
     result[0] = getMaxMeanTemp().get(6);
     result[1] = getMaxMeanTemp().get(7);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/iteration/src/main/java/com/datatorrent/demos/iteration/Application.java
----------------------------------------------------------------------
diff --git a/demos/iteration/src/main/java/com/datatorrent/demos/iteration/Application.java b/demos/iteration/src/main/java/com/datatorrent/demos/iteration/Application.java
index 5f93206..7fbdfd1 100644
--- a/demos/iteration/src/main/java/com/datatorrent/demos/iteration/Application.java
+++ b/demos/iteration/src/main/java/com/datatorrent/demos/iteration/Application.java
@@ -18,23 +18,24 @@
  */
 package com.datatorrent.demos.iteration;
 
-import com.datatorrent.api.Context;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import org.apache.hadoop.conf.Configuration;
 
+import com.datatorrent.api.Context;
+import com.datatorrent.api.DAG;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.DAG;
 import com.datatorrent.api.StreamingApplication;
 import com.datatorrent.api.annotation.ApplicationAnnotation;
 import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.common.util.DefaultDelayOperator;
 import com.datatorrent.lib.testbench.RandomEventGenerator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.PrintStream;
 
 /**
  * Iteration demo : <br>
@@ -64,10 +65,10 @@ import java.io.PrintStream;
  *
  * @since 3.4.0
  */
-@ApplicationAnnotation(name="IterationDemo")
+@ApplicationAnnotation(name = "IterationDemo")
 public class Application implements StreamingApplication
 {
-  private final static Logger LOG = LoggerFactory.getLogger(Application.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Application.class);
   private String extraOutputFileName; // for unit test
 
   public static class FibonacciOperator extends BaseOperator
@@ -117,7 +118,7 @@ public class Application implements StreamingApplication
       public void process(Object t)
       {
         String s = t.toString();
-        System.out.println(s);
+        LOG.info(s);
         if (extraOutputStream != null) {
           extraOutputStream.println(s);
         }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/iteration/src/test/java/com/datatorrent/demos/iteration/ApplicationTest.java
----------------------------------------------------------------------
diff --git a/demos/iteration/src/test/java/com/datatorrent/demos/iteration/ApplicationTest.java b/demos/iteration/src/test/java/com/datatorrent/demos/iteration/ApplicationTest.java
index 7804fcd..9fb89ac 100644
--- a/demos/iteration/src/test/java/com/datatorrent/demos/iteration/ApplicationTest.java
+++ b/demos/iteration/src/test/java/com/datatorrent/demos/iteration/ApplicationTest.java
@@ -18,17 +18,16 @@
  */
 package com.datatorrent.demos.iteration;
 
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
 
-import org.apache.hadoop.conf.Configuration;
 import org.junit.Assert;
 import org.junit.Test;
 
-import com.datatorrent.api.LocalMode;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
+import org.apache.hadoop.conf.Configuration;
 
+import com.datatorrent.api.LocalMode;
 
 /**
  *
@@ -61,7 +60,8 @@ public class ApplicationTest
       if (file.length() > 50) {
         break;
       }
-    } while (System.currentTimeMillis() - startTime < timeout);
+    }
+    while (System.currentTimeMillis() - startTime < timeout);
 
     lc.shutdown();
     try (BufferedReader br = new BufferedReader(new FileReader(outputFileName))) {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/Application.java
----------------------------------------------------------------------
diff --git a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/Application.java b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/Application.java
index 32aac35..55b299f 100644
--- a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/Application.java
+++ b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/Application.java
@@ -18,22 +18,23 @@
  */
 package com.datatorrent.demos.machinedata;
 
+import java.util.Map;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.conf.Configuration;
+
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.StreamingApplication;
 import com.datatorrent.api.annotation.ApplicationAnnotation;
 
+import com.datatorrent.contrib.redis.RedisKeyValPairOutputOperator;
 import com.datatorrent.demos.machinedata.data.MachineKey;
 import com.datatorrent.demos.machinedata.operator.MachineInfoAveragingOperator;
 import com.datatorrent.demos.machinedata.operator.MachineInfoAveragingPrerequisitesOperator;
-import com.datatorrent.contrib.redis.RedisKeyValPairOutputOperator;
 import com.datatorrent.lib.io.SmtpOutputOperator;
 
-import java.util.Map;
-
-import org.apache.hadoop.conf.Configuration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 /**
  * <p>
  * Resource monitor application.

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/DimensionGenerator.java
----------------------------------------------------------------------
diff --git a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/DimensionGenerator.java b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/DimensionGenerator.java
index 77b39b5..75c2a02 100644
--- a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/DimensionGenerator.java
+++ b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/DimensionGenerator.java
@@ -18,18 +18,13 @@
  */
 package com.datatorrent.demos.machinedata;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.Context;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.demos.machinedata.data.MachineInfo;
 import com.datatorrent.demos.machinedata.data.MachineKey;
 
-import java.util.*;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 /**
  * <p>
  * Information tuple generator with randomness.
@@ -42,9 +37,10 @@ public class DimensionGenerator extends BaseOperator
 {
   public transient DefaultOutputPort<MachineInfo> outputInline = new DefaultOutputPort<>();
   public transient DefaultOutputPort<MachineInfo> output = new DefaultOutputPort<>();
-  private int threshold=90;
+  private int threshold = 90;
 
-  public final transient DefaultInputPort<MachineInfo> inputPort = new DefaultInputPort<MachineInfo>() {
+  public final transient DefaultInputPort<MachineInfo> inputPort = new DefaultInputPort<MachineInfo>()
+  {
 
     @Override
     public void process(MachineInfo tuple)
@@ -113,9 +109,9 @@ public class DimensionGenerator extends BaseOperator
       int hdd = tuple.getHdd();
       MachineInfo machineInfo = new MachineInfo();
       machineInfo.setMachineKey(machineKey);
-      machineInfo.setCpu((cpu < threshold)?cpu:threshold);
-      machineInfo.setRam((ram < threshold)?ram:threshold);
-      machineInfo.setHdd((hdd < threshold)?hdd:threshold);
+      machineInfo.setCpu((cpu < threshold) ? cpu : threshold);
+      machineInfo.setRam((ram < threshold) ? ram : threshold);
+      machineInfo.setHdd((hdd < threshold) ? hdd : threshold);
       outputInline.emit(machineInfo);
       output.emit(machineInfo);
     }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/InputReceiver.java
----------------------------------------------------------------------
diff --git a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/InputReceiver.java b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/InputReceiver.java
index 560df52..85ec954 100644
--- a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/InputReceiver.java
+++ b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/InputReceiver.java
@@ -18,20 +18,23 @@
  */
 package com.datatorrent.demos.machinedata;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.Context;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.InputOperator;
-import com.datatorrent.demos.machinedata.data.MachineInfo;
-import com.datatorrent.demos.machinedata.data.MachineKey;
-
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
-import java.util.*;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.Random;
+import java.util.TimeZone;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.api.Context;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.InputOperator;
+import com.datatorrent.common.util.BaseOperator;
+import com.datatorrent.demos.machinedata.data.MachineInfo;
+import com.datatorrent.demos.machinedata.data.MachineKey;
+
 /**
  * <p>
  * Information tuple generator with randomness.
@@ -74,6 +77,7 @@ public class InputReceiver extends BaseOperator implements InputOperator
     dayDateFormat.setTimeZone(tz);
 
   }
+
   @Override
   public void setup(Context.OperatorContext context)
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/data/MachineKey.java
----------------------------------------------------------------------
diff --git a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/data/MachineKey.java b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/data/MachineKey.java
index 722a77e..2b3bb1c 100644
--- a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/data/MachineKey.java
+++ b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/data/MachineKey.java
@@ -305,25 +305,29 @@ public class MachineKey
     if (!(obj instanceof MachineKey)) {
       return false;
     }
-    MachineKey mkey = (MachineKey) obj;
+    MachineKey mkey = (MachineKey)obj;
     return checkStringEqual(this.timeKey, mkey.timeKey) && checkStringEqual(this.day, mkey.day) && checkIntEqual(this.customer, mkey.customer) && checkIntEqual(this.product, mkey.product) && checkIntEqual(this.os, mkey.os) && checkIntEqual(this.software1, mkey.software1) && checkIntEqual(this.software2, mkey.software2) && checkIntEqual(this.software3, mkey.software3) && checkIntEqual(this.deviceId, mkey.deviceId);
   }
 
   private boolean checkIntEqual(Integer a, Integer b)
   {
-    if ((a == null) && (b == null))
+    if ((a == null) && (b == null)) {
       return true;
-    if ((a != null) && a.equals(b))
+    }
+    if ((a != null) && a.equals(b)) {
       return true;
+    }
     return false;
   }
 
   private boolean checkStringEqual(String a, String b)
   {
-    if ((a == null) && (b == null))
+    if ((a == null) && (b == null)) {
       return true;
-    if ((a != null) && a.equals(b))
+    }
+    if ((a != null) && a.equals(b)) {
       return true;
+    }
     return false;
   }
 
@@ -331,20 +335,27 @@ public class MachineKey
   public String toString()
   {
     StringBuilder sb = new StringBuilder(timeKey);
-    if (customer != null)
+    if (customer != null) {
       sb.append("|0:").append(customer);
-    if (product != null)
+    }
+    if (product != null) {
       sb.append("|1:").append(product);
-    if (os != null)
+    }
+    if (os != null) {
       sb.append("|2:").append(os);
-    if (software1 != null)
+    }
+    if (software1 != null) {
       sb.append("|3:").append(software1);
-    if (software2 != null)
+    }
+    if (software2 != null) {
       sb.append("|4:").append(software2);
-    if (software3 != null)
+    }
+    if (software3 != null) {
       sb.append("|5:").append(software3);
-    if (deviceId != null)
+    }
+    if (deviceId != null) {
       sb.append("|6:").append(deviceId);
+    }
     return sb.toString();
   }
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/data/ResourceType.java
----------------------------------------------------------------------
diff --git a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/data/ResourceType.java b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/data/ResourceType.java
index a0b2ecf..d474c5c 100644
--- a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/data/ResourceType.java
+++ b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/data/ResourceType.java
@@ -18,45 +18,49 @@
  */
 package com.datatorrent.demos.machinedata.data;
 
-import com.google.common.collect.Maps;
-
 import java.util.Map;
 
+import com.google.common.collect.Maps;
+
 /**
  * This class captures the resources whose usage is collected for each device
  * <p>ResourceType class.</p>
  *
  * @since 0.3.5
  */
-public enum ResourceType {
+public enum ResourceType
+{
 
-    CPU("cpu"), RAM("ram"), HDD("hdd");
+  CPU("cpu"), RAM("ram"), HDD("hdd");
 
-    private static Map<String, ResourceType> descToResource = Maps.newHashMap();
+  private static Map<String, ResourceType> descToResource = Maps.newHashMap();
 
-    static {
-        for (ResourceType type : ResourceType.values()) {
-            descToResource.put(type.desc, type);
-        }
+  static {
+    for (ResourceType type : ResourceType.values()) {
+      descToResource.put(type.desc, type);
     }
+  }
 
-    private String desc;
+  private String desc;
 
-    private ResourceType(String desc) {
-        this.desc = desc;
-    }
+  private ResourceType(String desc)
+  {
+    this.desc = desc;
+  }
 
-    @Override
-    public String toString() {
-        return desc;
-    }
+  @Override
+  public String toString()
+  {
+    return desc;
+  }
 
-    /**
-     * This method returns ResourceType for the given description
-     * @param desc the description
-     * @return
-     */
-    public static ResourceType getResourceTypeOf(String desc) {
-        return descToResource.get(desc);
-    }
+  /**
+   * This method returns ResourceType for the given description
+   * @param desc the description
+   * @return
+   */
+  public static ResourceType getResourceTypeOf(String desc)
+  {
+    return descToResource.get(desc);
+  }
 }


[5/6] apex-malhar git commit: Fixed checkstyle errors for demos.

Posted by th...@apache.org.
Fixed checkstyle errors for demos.


Project: http://git-wip-us.apache.org/repos/asf/apex-malhar/repo
Commit: http://git-wip-us.apache.org/repos/asf/apex-malhar/commit/7d9386d2
Tree: http://git-wip-us.apache.org/repos/asf/apex-malhar/tree/7d9386d2
Diff: http://git-wip-us.apache.org/repos/asf/apex-malhar/diff/7d9386d2

Branch: refs/heads/master
Commit: 7d9386d2abb445954055f6bf8c6d2aa75f896d77
Parents: 846b4a3
Author: MalharJenkins <je...@datatorrent.com>
Authored: Fri Aug 26 13:33:43 2016 -0700
Committer: Shunxin <lu...@hotmail.com>
Committed: Fri Aug 26 14:07:31 2016 -0700

----------------------------------------------------------------------
 .../distributeddistinct/CountVerifier.java      |  10 +-
 .../IntegerUniqueValueCountAppender.java        |  17 +-
 .../RandomKeyValGenerator.java                  |  14 +-
 .../UniqueValueCountAppender.java               |  22 +-
 .../distributeddistinct/ApplicationTest.java    |   3 +-
 .../DistributedDistinctTest.java                |   4 +-
 .../StatefulApplicationTest.java                |  19 +-
 .../StatefulUniqueCountTest.java                |  23 +-
 .../demos/echoserver/Application.java           |   2 +-
 .../demos/echoserver/MessageResponder.java      |   4 +-
 .../demos/echoserver/NetworkManager.java        |  47 ++-
 .../demos/echoserver/ApplicationTest.java       |   6 +-
 .../demos/frauddetect/Application.java          |   8 +-
 .../frauddetect/AverageAlertingOperator.java    |  49 ++-
 .../demos/frauddetect/BankIdNumberKey.java      |   4 +-
 .../BankIdNumberSamplerOperator.java            |  41 +--
 .../CreditCardAmountSamplerOperator.java        |  43 +--
 .../demos/frauddetect/MerchantTransaction.java  |   4 +-
 .../MerchantTransactionBucketOperator.java      |  60 ++--
 .../MerchantTransactionGenerator.java           |  22 +-
 .../MerchantTransactionInputHandler.java        |  17 +-
 .../frauddetect/SlidingWindowSumKeyVal.java     |  56 ++--
 .../frauddetect/SlidingWindowSumObject.java     |   6 +-
 .../frauddetect/TransactionStatsAggregator.java |  26 +-
 .../operator/HdfsStringOutputOperator.java      |   7 +-
 .../operator/MongoDBOutputOperator.java         | 281 +++++++++--------
 .../demos/frauddetect/util/JsonUtils.java       |  17 +-
 .../frauddetect/FrauddetectApplicationTest.java |  40 +--
 .../malhar/stream/sample/MinimalWordCount.java  |  10 +-
 .../malhar/stream/sample/WindowedWordCount.java |  66 ++--
 .../stream/sample/complete/AutoComplete.java    |  17 +-
 .../sample/complete/CompletionCandidate.java    |   2 +-
 .../stream/sample/complete/PojoEvent.java       |   6 +-
 .../sample/complete/StreamingWordExtract.java   |  36 +--
 .../sample/complete/TopWikipediaSessions.java   |  70 ++---
 .../stream/sample/complete/TrafficRoutes.java   | 132 ++++----
 .../sample/complete/TwitterAutoComplete.java    |   3 +-
 .../sample/cookbook/CombinePerKeyExamples.java  |  40 +--
 .../stream/sample/cookbook/DeDupExample.java    |  24 +-
 .../stream/sample/cookbook/InputPojo.java       |  18 +-
 .../sample/cookbook/MaxPerKeyExamples.java      |  42 +--
 .../stream/sample/cookbook/OutputPojo.java      |  10 +-
 .../stream/sample/cookbook/TriggerExample.java  |   4 +-
 .../stream/sample/MinimalWordCountTest.java     |   8 +-
 .../stream/sample/WindowedWordCountTest.java    |  10 +-
 .../sample/complete/AutoCompleteTest.java       |   8 +-
 .../complete/StreamingWordExtractTest.java      |  26 +-
 .../complete/TopWikipediaSessionsTest.java      |   8 +-
 .../sample/complete/TrafficRoutesTest.java      |   8 +-
 .../cookbook/CombinePerKeyExamplesTest.java     |   8 +-
 .../sample/cookbook/DeDupExampleTest.java       |   8 +-
 .../sample/cookbook/MaxPerKeyExamplesTest.java  |  60 ++--
 .../demos/iteration/Application.java            |  23 +-
 .../demos/iteration/ApplicationTest.java        |  14 +-
 .../demos/machinedata/Application.java          |  15 +-
 .../demos/machinedata/DimensionGenerator.java   |  18 +-
 .../demos/machinedata/InputReceiver.java        |  20 +-
 .../demos/machinedata/data/MachineKey.java      |  35 ++-
 .../demos/machinedata/data/ResourceType.java    |  54 ++--
 .../operator/CalculatorOperator.java            |  25 +-
 .../operator/MachineInfoAveragingOperator.java  |  26 +-
 ...chineInfoAveragingPrerequisitesOperator.java |  18 +-
 .../operator/MachineInfoAveragingUnifier.java   |   5 +-
 .../demos/machinedata/util/Combinatorics.java   | 119 +++----
 .../demos/machinedata/util/DataTable.java       |  60 ++--
 .../machinedata/CalculatorOperatorTest.java     |  34 +-
 .../datatorrent/demos/mobile/Application.java   |  22 +-
 .../demos/mobile/PhoneEntryOperator.java        |  23 +-
 .../demos/mobile/PhoneMovementGenerator.java    |  56 ++--
 .../demos/mobile/ApplicationTest.java           |   8 +-
 .../demos/mrmonitor/Application.java            |   7 +-
 .../datatorrent/demos/mrmonitor/Constants.java  |  28 +-
 .../demos/mrmonitor/MRJobStatusOperator.java    |  50 ++-
 .../mrmonitor/MRMonitoringApplication.java      |   3 +-
 .../demos/mrmonitor/MRStatusObject.java         |  22 +-
 .../com/datatorrent/demos/mrmonitor/MRUtil.java |   7 +-
 .../demos/mrmonitor/MapToMRObjectOperator.java  |   3 +-
 .../mrmonitor/MrMonitoringApplicationTest.java  |   5 +-
 .../demos/mroperator/DateWritable.java          |  76 ++---
 .../mroperator/HdfsKeyValOutputOperator.java    |   2 +-
 .../mroperator/InvertedIndexApplication.java    |   2 +-
 .../demos/mroperator/LineIndexer.java           |  35 ++-
 .../demos/mroperator/LogCountsPerHour.java      | 313 +++++++++----------
 .../demos/mroperator/LogsCountApplication.java  |   2 +-
 .../demos/mroperator/MapOperator.java           |  83 ++---
 .../mroperator/NewWordCountApplication.java     |  20 +-
 .../demos/mroperator/OutputCollectorImpl.java   |  87 +++---
 .../demos/mroperator/ReduceOperator.java        | 266 ++++++++--------
 .../demos/mroperator/ReporterImpl.java          | 165 +++++-----
 .../datatorrent/demos/mroperator/WordCount.java |  23 +-
 .../demos/mroperator/MapOperatorTest.java       |  31 +-
 .../demos/mroperator/ReduceOperatorTest.java    |  70 +++--
 .../mroperator/WordCountMRApplicationTest.java  |   7 +-
 .../com/datatorrent/demos/pi/Application.java   |   7 +-
 .../demos/pi/ApplicationAppData.java            |   2 +-
 .../demos/pi/ApplicationWithScript.java         |  13 +-
 .../com/datatorrent/demos/pi/Calculator.java    |   4 +-
 .../datatorrent/demos/pi/NamedValueList.java    |  16 +-
 .../demos/pi/PiCalculateOperator.java           |   5 +-
 .../datatorrent/demos/pi/ApplicationTest.java   |  18 +-
 .../datatorrent/demos/pi/CalculatorTest.java    |   5 +-
 .../demos/r/oldfaithful/FaithfulRScript.java    |  16 +-
 .../demos/r/oldfaithful/InputGenerator.java     |   5 +-
 .../r/oldfaithful/OldFaithfulApplication.java   |   5 +-
 .../oldfaithful/OldFaithfulApplicationTest.java |   2 +-
 .../twitter/KinesisHashtagsApplication.java     |   8 +-
 .../demos/twitter/SlidingContainer.java         |   2 +-
 .../demos/twitter/TwitterDumpApplication.java   |   6 +-
 .../twitter/TwitterDumpHBaseApplication.java    |   8 +-
 .../twitter/TwitterStatusHashtagExtractor.java  |   8 +-
 .../twitter/TwitterStatusURLExtractor.java      |   7 +-
 .../twitter/TwitterStatusWordExtractor.java     |  16 +-
 .../twitter/TwitterTopCounterApplication.java   |  22 +-
 .../twitter/TwitterTopWordsApplication.java     |   6 +-
 .../TwitterTrendingHashtagsApplication.java     |   7 +-
 .../com/datatorrent/demos/twitter/URLSerDe.java |  10 +-
 .../demos/twitter/WindowedTopCounter.java       |  29 +-
 .../twitter/TwitterDumpApplicationTest.java     |   3 +-
 .../demos/twitter/TwitterTopCounterTest.java    |   5 +-
 .../demos/twitter/TwitterTopWordsTest.java      |  11 +-
 .../demos/uniquecount/Application.java          |   6 +-
 .../demos/uniquecount/CountVerifier.java        |  48 ++-
 .../demos/uniquecount/RandomDataGenerator.java  |  18 +-
 .../demos/uniquecount/RandomKeyValues.java      | 222 ++++++-------
 .../demos/uniquecount/RandomKeysGenerator.java  |  18 +-
 .../uniquecount/UniqueKeyValCountDemo.java      |   7 +-
 .../demos/uniquecount/ApplicationTest.java      |   4 +-
 .../demos/uniquecount/UniqueKeyValDemoTest.java |   4 +-
 .../demos/wordcount/Application.java            |  19 +-
 .../wordcount/ApplicationWithQuerySupport.java  |  33 +-
 .../demos/wordcount/FileWordCount.java          |  45 +--
 .../datatorrent/demos/wordcount/LineReader.java |   6 +-
 .../com/datatorrent/demos/wordcount/WCPair.java |  17 +-
 .../demos/wordcount/WindowWordCount.java        |   4 +-
 .../demos/wordcount/WordCountInputOperator.java | 128 ++++----
 .../demos/wordcount/WordCountWriter.java        |  10 +-
 .../datatorrent/demos/wordcount/WordReader.java |  14 +-
 .../demos/wordcount/ApplicationTest.java        |  14 +-
 .../yahoofinance/ApplicationWithDerbySQL.java   |  15 +-
 .../demos/yahoofinance/StockTickInput.java      |  50 +--
 .../yahoofinance/YahooFinanceApplication.java   |   9 +-
 .../YahooFinanceCSVInputOperator.java           |  28 +-
 .../demos/yahoofinance/ApplicationTest.java     |   5 +-
 .../ApplicationWithDerbySQLTest.java            |  12 +-
 144 files changed, 2238 insertions(+), 2102 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/distributedistinct/src/main/java/com/datatorrent/demos/distributeddistinct/CountVerifier.java
----------------------------------------------------------------------
diff --git a/demos/distributedistinct/src/main/java/com/datatorrent/demos/distributeddistinct/CountVerifier.java b/demos/distributedistinct/src/main/java/com/datatorrent/demos/distributeddistinct/CountVerifier.java
index d085744..417ed7c 100644
--- a/demos/distributedistinct/src/main/java/com/datatorrent/demos/distributeddistinct/CountVerifier.java
+++ b/demos/distributedistinct/src/main/java/com/datatorrent/demos/distributeddistinct/CountVerifier.java
@@ -39,7 +39,8 @@ public class CountVerifier implements Operator
   Map<Integer, Integer> trueCount = new HashMap<Integer, Integer>();
   Map<Integer, Integer> receivedCount = new HashMap<Integer, Integer>();
 
-  public transient final DefaultInputPort<KeyValPair<Integer, Integer>> trueIn = new DefaultInputPort<KeyValPair<Integer, Integer>>() {
+  public final transient DefaultInputPort<KeyValPair<Integer, Integer>> trueIn = new DefaultInputPort<KeyValPair<Integer, Integer>>()
+  {
     @Override
     public void process(KeyValPair<Integer, Integer> tuple)
     {
@@ -47,7 +48,8 @@ public class CountVerifier implements Operator
     }
   };
 
-  public transient final DefaultInputPort<KeyValPair<Integer, Integer>> recIn = new DefaultInputPort<KeyValPair<Integer, Integer>>() {
+  public final transient DefaultInputPort<KeyValPair<Integer, Integer>> recIn = new DefaultInputPort<KeyValPair<Integer, Integer>>()
+  {
     @Override
     public void process(KeyValPair<Integer, Integer> tuple)
     {
@@ -56,9 +58,9 @@ public class CountVerifier implements Operator
   };
 
   @OutputPortFieldAnnotation(optional = true)
-  public transient final DefaultOutputPort<Integer> successPort = new DefaultOutputPort<Integer>();
+  public final transient DefaultOutputPort<Integer> successPort = new DefaultOutputPort<Integer>();
   @OutputPortFieldAnnotation(optional = true)
-  public transient final DefaultOutputPort<Integer> failurePort = new DefaultOutputPort<Integer>();
+  public final transient DefaultOutputPort<Integer> failurePort = new DefaultOutputPort<Integer>();
 
   @Override
   public void setup(OperatorContext arg0)

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/distributedistinct/src/main/java/com/datatorrent/demos/distributeddistinct/IntegerUniqueValueCountAppender.java
----------------------------------------------------------------------
diff --git a/demos/distributedistinct/src/main/java/com/datatorrent/demos/distributeddistinct/IntegerUniqueValueCountAppender.java b/demos/distributedistinct/src/main/java/com/datatorrent/demos/distributeddistinct/IntegerUniqueValueCountAppender.java
index 28baf06..bf8a307 100644
--- a/demos/distributedistinct/src/main/java/com/datatorrent/demos/distributeddistinct/IntegerUniqueValueCountAppender.java
+++ b/demos/distributedistinct/src/main/java/com/datatorrent/demos/distributeddistinct/IntegerUniqueValueCountAppender.java
@@ -42,38 +42,39 @@ public class IntegerUniqueValueCountAppender extends UniqueValueCountAppender<In
   {
     Set<Integer> valSet = new HashSet<Integer>();
     try {
-      while (resultSet.next())
+      while (resultSet.next()) {
         valSet.add(resultSet.getInt(1));
+      }
       return valSet;
     } catch (SQLException e) {
       throw new RuntimeException("while processing the result set", e);
     }
   }
-  
+
   @Override
   protected void prepareGetStatement(PreparedStatement getStatement, Object key) throws SQLException
   {
-    getStatement.setInt(1, (Integer) key);
+    getStatement.setInt(1, (Integer)key);
   }
 
   @Override
   protected void preparePutStatement(PreparedStatement putStatement, Object key, Object value) throws SQLException
   {
     @SuppressWarnings("unchecked")
-    Set<Integer> valueSet = (Set<Integer>) value;
+    Set<Integer> valueSet = (Set<Integer>)value;
     for (Integer val : valueSet) {
       @SuppressWarnings("unchecked")
-      Set<Integer> currentVals = (Set<Integer>) get(key);
+      Set<Integer> currentVals = (Set<Integer>)get(key);
       if (!currentVals.contains(val)) {
         batch = true;
-        putStatement.setInt(1, (Integer) key);
+        putStatement.setInt(1, (Integer)key);
         putStatement.setInt(2, val);
         putStatement.setLong(3, windowID);
         putStatement.addBatch();
       }
     }
   }
-  
+
   @Override
   public void endWindow()
   {
@@ -84,7 +85,7 @@ public class IntegerUniqueValueCountAppender extends UniqueValueCountAppender<In
       while (resultSet.next()) {
         int val = resultSet.getInt(1);
         @SuppressWarnings("unchecked")
-        Set<Integer> valSet = (Set<Integer>) cacheManager.get(val);
+        Set<Integer> valSet = (Set<Integer>)cacheManager.get(val);
         output.emit(new KeyValPair<Object, Object>(val, valSet.size()));
       }
     } catch (SQLException e) {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/distributedistinct/src/main/java/com/datatorrent/demos/distributeddistinct/RandomKeyValGenerator.java
----------------------------------------------------------------------
diff --git a/demos/distributedistinct/src/main/java/com/datatorrent/demos/distributeddistinct/RandomKeyValGenerator.java b/demos/distributedistinct/src/main/java/com/datatorrent/demos/distributeddistinct/RandomKeyValGenerator.java
index bb12063..c8016da 100644
--- a/demos/distributedistinct/src/main/java/com/datatorrent/demos/distributeddistinct/RandomKeyValGenerator.java
+++ b/demos/distributedistinct/src/main/java/com/datatorrent/demos/distributeddistinct/RandomKeyValGenerator.java
@@ -24,7 +24,8 @@ import java.util.Map;
 import java.util.Random;
 import java.util.Set;
 
-import org.slf4j.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultOutputPort;
@@ -75,8 +76,9 @@ public class RandomKeyValGenerator implements InputOperator
         verport.emit(new KeyHashValPair<Integer, Integer>(e.getKey(), e.getValue().size()));
       }
     }
-    if(clearHistory)
+    if (clearHistory) {
       valhistory.clear();
+    }
   }
 
   @Override
@@ -130,7 +132,7 @@ public class RandomKeyValGenerator implements InputOperator
 
   /**
    * Sets the number of possible keys to numKeys
-   * 
+   *
    * @param numKeys
    *          the new number of possible keys
    */
@@ -141,7 +143,7 @@ public class RandomKeyValGenerator implements InputOperator
 
   /**
    * Returns the number of possible values that can be emitted
-   * 
+   *
    * @return the number of possible values that can be emitted
    */
   public int getNumVals()
@@ -151,7 +153,7 @@ public class RandomKeyValGenerator implements InputOperator
 
   /**
    * Sets the number of possible values that can be emitted to numVals
-   * 
+   *
    * @param numVals
    *          the number of possible values that can be emitted
    */
@@ -162,7 +164,7 @@ public class RandomKeyValGenerator implements InputOperator
 
   /**
    * Sets the number of KeyValPairs to be emitted to tupleBlast
-   * 
+   *
    * @param tupleBlast
    *          the new number of KeyValPairs to be emitted
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/distributedistinct/src/main/java/com/datatorrent/demos/distributeddistinct/UniqueValueCountAppender.java
----------------------------------------------------------------------
diff --git a/demos/distributedistinct/src/main/java/com/datatorrent/demos/distributeddistinct/UniqueValueCountAppender.java b/demos/distributedistinct/src/main/java/com/datatorrent/demos/distributeddistinct/UniqueValueCountAppender.java
index 7c91f77..3f14e0f 100644
--- a/demos/distributedistinct/src/main/java/com/datatorrent/demos/distributeddistinct/UniqueValueCountAppender.java
+++ b/demos/distributedistinct/src/main/java/com/datatorrent/demos/distributeddistinct/UniqueValueCountAppender.java
@@ -29,19 +29,18 @@ import java.util.Set;
 import javax.annotation.Nonnull;
 import javax.validation.constraints.Min;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.datatorrent.lib.algo.UniqueValueCount;
-import com.datatorrent.lib.algo.UniqueValueCount.InternalCountOutput;
-import com.datatorrent.lib.db.jdbc.JDBCLookupCacheBackedOperator;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
 
 import com.datatorrent.api.Context;
 import com.datatorrent.api.DefaultPartition;
 import com.datatorrent.api.Partitioner;
+import com.datatorrent.lib.algo.UniqueValueCount;
+import com.datatorrent.lib.algo.UniqueValueCount.InternalCountOutput;
+import com.datatorrent.lib.db.jdbc.JDBCLookupCacheBackedOperator;
 import com.datatorrent.netlet.util.DTThrowable;
 
 /**
@@ -106,8 +105,7 @@ public abstract class UniqueValueCountAppender<V> extends JDBCLookupCacheBackedO
           deleteStatement.executeUpdate();
         }
       }
-    }
-    catch (SQLException e) {
+    } catch (SQLException e) {
       throw new RuntimeException(e);
     }
   }
@@ -118,7 +116,7 @@ public abstract class UniqueValueCountAppender<V> extends JDBCLookupCacheBackedO
 
     Object key = getKeyFromTuple(tuple);
     @SuppressWarnings("unchecked")
-    Set<Object> values = (Set<Object>) cacheManager.get(key);
+    Set<Object> values = (Set<Object>)cacheManager.get(key);
     if (values == null) {
       values = Sets.newHashSet();
     }
@@ -154,8 +152,7 @@ public abstract class UniqueValueCountAppender<V> extends JDBCLookupCacheBackedO
         putStatement.executeBatch();
         putStatement.clearBatch();
       }
-    }
-    catch (SQLException e) {
+    } catch (SQLException e) {
       throw new RuntimeException("while executing insert", e);
     }
   }
@@ -210,8 +207,7 @@ public abstract class UniqueValueCountAppender<V> extends JDBCLookupCacheBackedO
         UniqueValueCountAppender<V> statefulUniqueCount = this.getClass().newInstance();
         DefaultPartition<UniqueValueCountAppender<V>> partition = new DefaultPartition<UniqueValueCountAppender<V>>(statefulUniqueCount);
         newPartitions.add(partition);
-      }
-      catch (Throwable cause) {
+      } catch (Throwable cause) {
         DTThrowable.rethrow(cause);
       }
     }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/distributedistinct/src/test/java/com/datatorrent/demos/distributeddistinct/ApplicationTest.java
----------------------------------------------------------------------
diff --git a/demos/distributedistinct/src/test/java/com/datatorrent/demos/distributeddistinct/ApplicationTest.java b/demos/distributedistinct/src/test/java/com/datatorrent/demos/distributeddistinct/ApplicationTest.java
index 3ebfcb1..ef5473f 100644
--- a/demos/distributedistinct/src/test/java/com/datatorrent/demos/distributeddistinct/ApplicationTest.java
+++ b/demos/distributedistinct/src/test/java/com/datatorrent/demos/distributeddistinct/ApplicationTest.java
@@ -21,7 +21,6 @@ package com.datatorrent.demos.distributeddistinct;
 import org.junit.Test;
 
 import com.datatorrent.api.LocalMode;
-import com.datatorrent.demos.distributeddistinct.Application;
 
 public class ApplicationTest
 {
@@ -30,4 +29,4 @@ public class ApplicationTest
   {
     LocalMode.runApp(new Application(), 15000);
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/distributedistinct/src/test/java/com/datatorrent/demos/distributeddistinct/DistributedDistinctTest.java
----------------------------------------------------------------------
diff --git a/demos/distributedistinct/src/test/java/com/datatorrent/demos/distributeddistinct/DistributedDistinctTest.java b/demos/distributedistinct/src/test/java/com/datatorrent/demos/distributeddistinct/DistributedDistinctTest.java
index d32047a..e013217 100644
--- a/demos/distributedistinct/src/test/java/com/datatorrent/demos/distributeddistinct/DistributedDistinctTest.java
+++ b/demos/distributedistinct/src/test/java/com/datatorrent/demos/distributeddistinct/DistributedDistinctTest.java
@@ -48,8 +48,8 @@ public class DistributedDistinctTest
 {
   private static final Logger logger = LoggerFactory.getLogger(DistributedDistinctTest.class);
 
-  private final static String APP_ID = "DistributedDistinctTest";
-  private final static int OPERATOR_ID = 0;
+  private static final String APP_ID = "DistributedDistinctTest";
+  private static final int OPERATOR_ID = 0;
 
   public static final String INMEM_DB_URL = "jdbc:hsqldb:mem:test;sql.syntax_mys=true";
   public static final String INMEM_DB_DRIVER = "org.hsqldb.jdbc.JDBCDriver";

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/distributedistinct/src/test/java/com/datatorrent/demos/distributeddistinct/StatefulApplicationTest.java
----------------------------------------------------------------------
diff --git a/demos/distributedistinct/src/test/java/com/datatorrent/demos/distributeddistinct/StatefulApplicationTest.java b/demos/distributedistinct/src/test/java/com/datatorrent/demos/distributeddistinct/StatefulApplicationTest.java
index ee7c1f1..57ac964 100644
--- a/demos/distributedistinct/src/test/java/com/datatorrent/demos/distributeddistinct/StatefulApplicationTest.java
+++ b/demos/distributedistinct/src/test/java/com/datatorrent/demos/distributeddistinct/StatefulApplicationTest.java
@@ -24,18 +24,19 @@ import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.Properties;
 
-import org.apache.hadoop.conf.Configuration;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import org.apache.hadoop.conf.Configuration;
+
 import com.datatorrent.api.LocalMode;
-import com.datatorrent.demos.distributeddistinct.StatefulApplication;
 
 public class StatefulApplicationTest
 {
-  
+
   @BeforeClass
-  public static void setup(){
+  public static void setup()
+  {
     try {
       Class.forName(StatefulUniqueCountTest.INMEM_DB_DRIVER).newInstance();
       Connection con = DriverManager.getConnection(StatefulUniqueCountTest.INMEM_DB_URL, new Properties());
@@ -51,27 +52,27 @@ public class StatefulApplicationTest
       throw new RuntimeException(e);
     }
   }
-  
+
   @Test
   public void testApplication() throws Exception
   {
     LocalMode lma = LocalMode.newInstance();
     Configuration conf = new Configuration(false);
-    conf.set("dt.operator.StatefulUniqueCounter.prop.tableName", "Test_Lookup_Cache"); 
+    conf.set("dt.operator.StatefulUniqueCounter.prop.tableName", "Test_Lookup_Cache");
     conf.set("dt.operator.StatefulUniqueCounter.prop.store.dbUrl", "jdbc:hsqldb:mem:test;sql.syntax_mys=true");
     conf.set("dt.operator.StatefulUniqueCounter.prop.store.dbDriver", "org.hsqldb.jdbcDriver");
-      
+
     lma.prepareDAG(new StatefulApplication(), conf);
     lma.cloneDAG();
     LocalMode.Controller lc = lma.getController();
     lc.setHeartbeatMonitoringEnabled(false);
     lc.runAsync();
-    
+
     long now = System.currentTimeMillis();
     while (System.currentTimeMillis() - now < 15000) {
       Thread.sleep(1000);
     }
-    
+
     lc.shutdown();
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/distributedistinct/src/test/java/com/datatorrent/demos/distributeddistinct/StatefulUniqueCountTest.java
----------------------------------------------------------------------
diff --git a/demos/distributedistinct/src/test/java/com/datatorrent/demos/distributeddistinct/StatefulUniqueCountTest.java b/demos/distributedistinct/src/test/java/com/datatorrent/demos/distributeddistinct/StatefulUniqueCountTest.java
index 55f1c8e..a1ac603 100644
--- a/demos/distributedistinct/src/test/java/com/datatorrent/demos/distributeddistinct/StatefulUniqueCountTest.java
+++ b/demos/distributedistinct/src/test/java/com/datatorrent/demos/distributeddistinct/StatefulUniqueCountTest.java
@@ -18,20 +18,29 @@
  */
 package com.datatorrent.demos.distributeddistinct;
 
-import java.sql.*;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Properties;
 
-import org.apache.hadoop.conf.Configuration;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import com.datatorrent.api.*;
+import org.apache.hadoop.conf.Configuration;
+
 import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DAG;
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.InputOperator;
+import com.datatorrent.api.LocalMode;
+import com.datatorrent.api.StreamingApplication;
 import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.demos.distributeddistinct.IntegerUniqueValueCountAppender;
 
 import com.datatorrent.lib.algo.UniqueValueCount;
 import com.datatorrent.lib.util.KeyValPair;
@@ -96,7 +105,8 @@ public class StatefulUniqueCountTest
     private static final String INMEM_DB_DRIVER = "org.hsqldb.jdbc.JDBCDriver";
     protected static final String TABLE_NAME = "Test_Lookup_Cache";
 
-    public final transient DefaultInputPort<Object> input = new DefaultInputPort<Object>() {
+    public final transient DefaultInputPort<Object> input = new DefaultInputPort<Object>()
+    {
       @Override
       public void process(Object tuple)
       {
@@ -196,7 +206,8 @@ public class StatefulUniqueCountTest
   }
 
   @BeforeClass
-  public static void setup(){
+  public static void setup()
+  {
     try {
       Class.forName(INMEM_DB_DRIVER).newInstance();
       Connection con = DriverManager.getConnection(INMEM_DB_URL, new Properties());

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/echoserver/src/main/java/com/datatorrent/demos/echoserver/Application.java
----------------------------------------------------------------------
diff --git a/demos/echoserver/src/main/java/com/datatorrent/demos/echoserver/Application.java b/demos/echoserver/src/main/java/com/datatorrent/demos/echoserver/Application.java
index 74a8f99..90a3fd2 100644
--- a/demos/echoserver/src/main/java/com/datatorrent/demos/echoserver/Application.java
+++ b/demos/echoserver/src/main/java/com/datatorrent/demos/echoserver/Application.java
@@ -27,7 +27,7 @@ import com.datatorrent.api.annotation.ApplicationAnnotation;
 /**
  * @since 2.1.0
  */
-@ApplicationAnnotation(name="EchoServer")
+@ApplicationAnnotation(name = "EchoServer")
 public class Application implements StreamingApplication
 {
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/echoserver/src/main/java/com/datatorrent/demos/echoserver/MessageResponder.java
----------------------------------------------------------------------
diff --git a/demos/echoserver/src/main/java/com/datatorrent/demos/echoserver/MessageResponder.java b/demos/echoserver/src/main/java/com/datatorrent/demos/echoserver/MessageResponder.java
index 7b8b423..ce7a1bc 100644
--- a/demos/echoserver/src/main/java/com/datatorrent/demos/echoserver/MessageResponder.java
+++ b/demos/echoserver/src/main/java/com/datatorrent/demos/echoserver/MessageResponder.java
@@ -23,9 +23,9 @@ import java.net.SocketAddress;
 import java.nio.ByteBuffer;
 import java.nio.channels.DatagramChannel;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.Context;
 import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * @since 2.1.0
@@ -40,7 +40,7 @@ public class MessageResponder extends BaseOperator
   private transient NetworkManager.ChannelAction<DatagramChannel> action;
   private transient ByteBuffer buffer;
 
-  public transient final DefaultInputPort<Message> messageInput = new DefaultInputPort<Message>()
+  public final transient DefaultInputPort<Message> messageInput = new DefaultInputPort<Message>()
   {
     @Override
     public void process(Message message)

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/echoserver/src/main/java/com/datatorrent/demos/echoserver/NetworkManager.java
----------------------------------------------------------------------
diff --git a/demos/echoserver/src/main/java/com/datatorrent/demos/echoserver/NetworkManager.java b/demos/echoserver/src/main/java/com/datatorrent/demos/echoserver/NetworkManager.java
index a89e09d..056068f 100644
--- a/demos/echoserver/src/main/java/com/datatorrent/demos/echoserver/NetworkManager.java
+++ b/demos/echoserver/src/main/java/com/datatorrent/demos/echoserver/NetworkManager.java
@@ -23,8 +23,15 @@ import java.net.DatagramSocket;
 import java.net.InetSocketAddress;
 import java.net.Socket;
 import java.net.SocketAddress;
-import java.nio.channels.*;
-import java.util.*;
+import java.nio.channels.DatagramChannel;
+import java.nio.channels.SelectableChannel;
+import java.nio.channels.SelectionKey;
+import java.nio.channels.Selector;
+import java.nio.channels.SocketChannel;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.ConcurrentLinkedQueue;
 
 import org.slf4j.Logger;
@@ -37,7 +44,11 @@ public class NetworkManager implements Runnable
 {
   private static final Logger logger = LoggerFactory.getLogger(NetworkManager.class);
 
-  public static enum ConnectionType { TCP, UDP };
+  public static enum ConnectionType
+  {
+    TCP,
+    UDP
+  }
 
   private static NetworkManager _instance;
   private Selector selector;
@@ -180,36 +191,48 @@ public class NetworkManager implements Runnable
     }
   }
 
-  public static interface ChannelListener<T extends SelectableChannel> {
+  public static interface ChannelListener<T extends SelectableChannel>
+  {
     public void ready(ChannelAction<T> action, int readyOps);
   }
 
-  public static class ChannelConfiguration<T extends SelectableChannel> {
+  public static class ChannelConfiguration<T extends SelectableChannel>
+  {
     public T channel;
     public ConnectionInfo connectionInfo;
     public Collection<ChannelAction> actions;
   }
 
-  public static class ChannelAction<T extends SelectableChannel> {
+  public static class ChannelAction<T extends SelectableChannel>
+  {
     public ChannelConfiguration<T> channelConfiguration;
     public ChannelListener<T> listener;
     public int ops;
   }
 
-  private static class ConnectionInfo {
+  private static class ConnectionInfo
+  {
     public SocketAddress address;
     public ConnectionType connectionType;
 
     @Override
     public boolean equals(Object o)
     {
-      if (this == o) return true;
-      if (o == null || getClass() != o.getClass()) return false;
+      if (this == o) {
+        return true;
+      }
+      if (o == null || getClass() != o.getClass()) {
+        return false;
+      }
 
-      ConnectionInfo that = (ConnectionInfo) o;
+      ConnectionInfo that = (ConnectionInfo)o;
 
-      if (connectionType != that.connectionType) return false;
-      if (!address.equals(that.address)) return false;
+      if (connectionType != that.connectionType) {
+        return false;
+      }
+      if (!address.equals(that.address)) {
+        return false;
+      }
 
       return true;
     }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/echoserver/src/test/java/com/datatorrent/demos/echoserver/ApplicationTest.java
----------------------------------------------------------------------
diff --git a/demos/echoserver/src/test/java/com/datatorrent/demos/echoserver/ApplicationTest.java b/demos/echoserver/src/test/java/com/datatorrent/demos/echoserver/ApplicationTest.java
index a25e47f..8c52a9d 100644
--- a/demos/echoserver/src/test/java/com/datatorrent/demos/echoserver/ApplicationTest.java
+++ b/demos/echoserver/src/test/java/com/datatorrent/demos/echoserver/ApplicationTest.java
@@ -32,10 +32,12 @@ import com.datatorrent.api.LocalMode;
 /**
  * Test the DAG declaration in local mode.
  */
-public class ApplicationTest {
+public class ApplicationTest
+{
 
   @Test
-  public void testApplication() throws IOException, Exception {
+  public void testApplication() throws IOException, Exception
+  {
     try {
       LocalMode lma = LocalMode.newInstance();
       Configuration conf = new Configuration(false);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/Application.java
----------------------------------------------------------------------
diff --git a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/Application.java b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/Application.java
index c28697e..8d7c325 100644
--- a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/Application.java
+++ b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/Application.java
@@ -26,7 +26,6 @@ import com.datatorrent.api.Context.DAGContext;
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.StreamingApplication;
 import com.datatorrent.api.annotation.ApplicationAnnotation;
-import com.datatorrent.netlet.util.DTThrowable;
 import com.datatorrent.demos.frauddetect.operator.HdfsStringOutputOperator;
 import com.datatorrent.demos.frauddetect.operator.MongoDBOutputOperator;
 import com.datatorrent.lib.io.ConsoleOutputOperator;
@@ -36,6 +35,7 @@ import com.datatorrent.lib.math.RangeKeyVal;
 import com.datatorrent.lib.multiwindow.SimpleMovingAverage;
 import com.datatorrent.lib.util.BaseKeyValueOperator;
 import com.datatorrent.lib.util.KeyValPair;
+import com.datatorrent.netlet.util.DTThrowable;
 
 
 /**
@@ -43,11 +43,10 @@ import com.datatorrent.lib.util.KeyValPair;
  *
  * @since 0.9.0
  */
-@ApplicationAnnotation(name="FraudDetectDemo")
+@ApplicationAnnotation(name = "FraudDetectDemo")
 public class Application implements StreamingApplication
 {
 
-
   public PubSubWebSocketInputOperator getPubSubWebSocketInputOperator(String name, DAG dag, URI duri, String topic) throws Exception
   {
     PubSubWebSocketInputOperator reqin = dag.addOperator(name, new PubSubWebSocketInputOperator());
@@ -78,7 +77,8 @@ public class Application implements StreamingApplication
     return oper;
   }
 
-  public static class KeyPartitionCodec<K, V> extends BaseKeyValueOperator.DefaultPartitionCodec<K,V> implements Serializable {
+  public static class KeyPartitionCodec<K, V> extends BaseKeyValueOperator.DefaultPartitionCodec<K,V> implements Serializable
+  {
     private static final long serialVersionUID = 201410031623L;
   }
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/AverageAlertingOperator.java
----------------------------------------------------------------------
diff --git a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/AverageAlertingOperator.java b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/AverageAlertingOperator.java
index 5fb665c..b813a40 100644
--- a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/AverageAlertingOperator.java
+++ b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/AverageAlertingOperator.java
@@ -18,23 +18,25 @@
  */
 package com.datatorrent.demos.frauddetect;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.lib.util.KeyValPair;
-import com.datatorrent.demos.frauddetect.util.JsonUtils;
-import org.apache.commons.lang.mutable.MutableDouble;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.validation.constraints.NotNull;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import javax.validation.constraints.NotNull;
+
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.commons.lang.mutable.MutableDouble;
+
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
+import com.datatorrent.demos.frauddetect.util.JsonUtils;
+import com.datatorrent.lib.util.KeyValPair;
 
 /**
  * Generate an alert if the current transaction amount received on tx input port for the given key is greater by n %
@@ -45,8 +47,8 @@ import java.util.Map;
 public class AverageAlertingOperator extends BaseOperator
 {
   private static final Logger Log = LoggerFactory.getLogger(AverageAlertingOperator.class);
-  private transient final JsonFactory jsonFactory = new JsonFactory();
-  private transient final ObjectMapper mapper = new ObjectMapper(jsonFactory);
+  private final transient JsonFactory jsonFactory = new JsonFactory();
+  private final transient ObjectMapper mapper = new ObjectMapper(jsonFactory);
   private Map<MerchantKey, MutableDouble> lastSMAMap = new HashMap<MerchantKey, MutableDouble>();
   private Map<MerchantKey, MutableDouble> currentSMAMap = new HashMap<MerchantKey, MutableDouble>();
   private List<AverageAlertData> alerts = new ArrayList<AverageAlertData>();
@@ -57,7 +59,7 @@ public class AverageAlertingOperator extends BaseOperator
   public final transient DefaultOutputPort<String> avgAlertOutputPort = new DefaultOutputPort<String>();
   public final transient DefaultOutputPort<Map<String, Object>> avgAlertNotificationPort = new DefaultOutputPort<Map<String, Object>>();
   public final transient DefaultInputPort<KeyValPair<MerchantKey, Double>> smaInputPort =
-          new DefaultInputPort<KeyValPair<MerchantKey, Double>>()
+      new DefaultInputPort<KeyValPair<MerchantKey, Double>>()
   {
     @Override
     public void process(KeyValPair<MerchantKey, Double> tuple)
@@ -67,8 +69,7 @@ public class AverageAlertingOperator extends BaseOperator
         double sma = tuple.getValue();
         currentSMAMap.put(tuple.getKey(), new MutableDouble(sma));
         //lastSMAMap.put(tuple.getKey(), new MutableDouble(sma));
-      }
-      else { // move the current SMA value to the last SMA Map
+      } else { // move the current SMA value to the last SMA Map
         //lastSMAMap.get(tuple.getKey()).setValue(currentSma.getValue());
         currentSma.setValue(tuple.getValue());  // update the current SMA value
       }
@@ -76,7 +77,7 @@ public class AverageAlertingOperator extends BaseOperator
 
   };
   public final transient DefaultInputPort<KeyValPair<MerchantKey, Long>> txInputPort =
-          new DefaultInputPort<KeyValPair<MerchantKey, Long>>()
+      new DefaultInputPort<KeyValPair<MerchantKey, Long>>()
   {
     @Override
     public void process(KeyValPair<MerchantKey, Long> tuple)
@@ -100,8 +101,7 @@ public class AverageAlertingOperator extends BaseOperator
         //if (userGenerated) {   // if its user generated only the pass it to WebSocket
         if (merchantKey.merchantType == MerchantTransaction.MerchantType.BRICK_AND_MORTAR) {
           avgAlertNotificationPort.emit(getOutputData(data, String.format(brickMortarAlertMsg, txValue, change, lastSmaValue, merchantKey.merchantId, merchantKey.terminalId)));
-        }
-        else { // its internet based
+        } else { // its internet based
           avgAlertNotificationPort.emit(getOutputData(data, String.format(internetAlertMsg, txValue, change, lastSmaValue, merchantKey.merchantId)));
 
         }
@@ -116,8 +116,7 @@ public class AverageAlertingOperator extends BaseOperator
     for (AverageAlertData data : alerts) {
       try {
         avgAlertOutputPort.emit(JsonUtils.toJson(data));
-      }
-      catch (IOException e) {
+      } catch (IOException e) {
         logger.warn("Exception while converting object to JSON", e);
       }
     }
@@ -131,8 +130,7 @@ public class AverageAlertingOperator extends BaseOperator
       if (lastSma == null) {
         lastSma = new MutableDouble(currentSma.doubleValue());
         lastSMAMap.put(key, lastSma);
-      }
-      else {
+      } else {
         lastSma.setValue(currentSma.getValue());
       }
     }
@@ -167,8 +165,7 @@ public class AverageAlertingOperator extends BaseOperator
     try {
       String str = mapper.writeValueAsString(output);
       logger.debug("user generated tx alert: " + str);
-    }
-    catch (Exception exc) {
+    } catch (Exception exc) {
       //ignore
     }
     return output;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/BankIdNumberKey.java
----------------------------------------------------------------------
diff --git a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/BankIdNumberKey.java b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/BankIdNumberKey.java
index df0aa7e..87cf043 100644
--- a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/BankIdNumberKey.java
+++ b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/BankIdNumberKey.java
@@ -18,10 +18,10 @@
  */
 package com.datatorrent.demos.frauddetect;
 
-import com.datatorrent.lib.util.TimeBucketKey;
-
 import java.io.Serializable;
 
+import com.datatorrent.lib.util.TimeBucketKey;
+
 /**
  * Bank Id Number Key
  *

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/BankIdNumberSamplerOperator.java
----------------------------------------------------------------------
diff --git a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/BankIdNumberSamplerOperator.java b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/BankIdNumberSamplerOperator.java
index 642c702..abfa202 100644
--- a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/BankIdNumberSamplerOperator.java
+++ b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/BankIdNumberSamplerOperator.java
@@ -18,23 +18,26 @@
  */
 package com.datatorrent.demos.frauddetect;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.lib.util.KeyValPair;
-import com.datatorrent.demos.frauddetect.util.JsonUtils;
-import org.apache.commons.lang.mutable.MutableLong;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+
 import org.codehaus.jackson.JsonFactory;
 import org.codehaus.jackson.map.ObjectMapper;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.commons.lang.mutable.MutableLong;
+
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
+import com.datatorrent.demos.frauddetect.util.JsonUtils;
+import com.datatorrent.lib.util.KeyValPair;
+
 /**
  * Count the transactions for the underlying aggregation window if the same BIN is
  * being used for more than defined number of transactions. Output the data as needed
@@ -44,19 +47,19 @@ import org.codehaus.jackson.map.ObjectMapper;
  */
 public class BankIdNumberSamplerOperator extends BaseOperator
 {
-  private transient final JsonFactory jsonFactory = new JsonFactory();
-  private transient final ObjectMapper mapper = new ObjectMapper(jsonFactory);
+  private final transient JsonFactory jsonFactory = new JsonFactory();
+  private final transient ObjectMapper mapper = new ObjectMapper(jsonFactory);
   private int threshold;
   private Map<MerchantKey, Map<String, BankIdNumData>> bankIdNumCountMap = new HashMap<MerchantKey, Map<String, BankIdNumData>>();
   private static final String ALERT_MSG =
-          "Potential fraudulent CC transactions (same bank id %s and merchant %s) total transactions: %d";
+      "Potential fraudulent CC transactions (same bank id %s and merchant %s) total transactions: %d";
   /**
    * Output the key-value pair for the BIN as key with the count as value.
    */
   public final transient DefaultOutputPort<String> countAlertOutputPort =
-          new DefaultOutputPort<String>();
+      new DefaultOutputPort<String>();
   public final transient DefaultOutputPort<Map<String, Object>> countAlertNotificationPort =
-          new DefaultOutputPort<Map<String, Object>>();
+      new DefaultOutputPort<Map<String, Object>>();
 
   public int getThreshold()
   {
@@ -103,7 +106,7 @@ public class BankIdNumberSamplerOperator extends BaseOperator
   */
 
   public final transient DefaultInputPort<KeyValPair<KeyValPair<MerchantKey, String>, Integer>> txCountInputPort =
-          new DefaultInputPort<KeyValPair<KeyValPair<MerchantKey, String>, Integer>>()
+      new DefaultInputPort<KeyValPair<KeyValPair<MerchantKey, String>, Integer>>()
   {
     @Override
     public void process(KeyValPair<KeyValPair<MerchantKey, String>, Integer> tuple)
@@ -162,8 +165,7 @@ public class BankIdNumberSamplerOperator extends BaseOperator
           try {
             countAlertOutputPort.emit(JsonUtils.toJson(data));
             countAlertNotificationPort.emit(getOutputData(data));
-          }
-          catch (IOException e) {
+          } catch (IOException e) {
             logger.warn("Exception while converting object to JSON: ", e);
           }
         }
@@ -196,15 +198,14 @@ public class BankIdNumberSamplerOperator extends BaseOperator
     try {
       String str = mapper.writeValueAsString(output);
       logger.debug("user generated tx alert: " + str);
-    }
-    catch (Exception exc) {
+    } catch (Exception exc) {
       //ignore
     }
 
     return output;
   }
 
-  public final static class BankIdNumData
+  public static final class BankIdNumData
   {
     public String bankIdNum;
     public MutableLong count = new MutableLong();

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/CreditCardAmountSamplerOperator.java
----------------------------------------------------------------------
diff --git a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/CreditCardAmountSamplerOperator.java b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/CreditCardAmountSamplerOperator.java
index d728306..a232fd4 100644
--- a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/CreditCardAmountSamplerOperator.java
+++ b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/CreditCardAmountSamplerOperator.java
@@ -18,19 +18,26 @@
  */
 package com.datatorrent.demos.frauddetect;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.lib.util.KeyValPair;
-import com.datatorrent.demos.frauddetect.util.JsonUtils;
-import org.apache.commons.lang.mutable.MutableLong;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
 import org.codehaus.jackson.JsonFactory;
 import org.codehaus.jackson.map.ObjectMapper;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.util.*;
+import org.apache.commons.lang.mutable.MutableLong;
+
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
+import com.datatorrent.demos.frauddetect.util.JsonUtils;
+import com.datatorrent.lib.util.KeyValPair;
 
 /**
  * An operator to alert in case a transaction of a small lowAmount is followed by a transaction which is significantly larger for a given credit card number.
@@ -42,8 +49,8 @@ import java.util.*;
  */
 public class CreditCardAmountSamplerOperator extends BaseOperator
 {
-  private transient final JsonFactory jsonFactory = new JsonFactory();
-  private transient final ObjectMapper mapper = new ObjectMapper(jsonFactory);
+  private final transient JsonFactory jsonFactory = new JsonFactory();
+  private final transient ObjectMapper mapper = new ObjectMapper(jsonFactory);
   private static final Logger logger = LoggerFactory.getLogger(Application.class);
   // Factor to be applied to existing lowAmount to flag potential alerts.
   private double threshold = 9500;
@@ -52,7 +59,7 @@ public class CreditCardAmountSamplerOperator extends BaseOperator
   private List<CreditCardAlertData> alerts = new ArrayList<CreditCardAlertData>();
   //private List<CreditCardAlertData> userAlerts = new ArrayList<CreditCardAlertData>();
   private static final String ALERT_MSG =
-          "Potential fraudulent CC transactions (small one USD %d followed by large USD %d) performed using credit card: %s";
+      "Potential fraudulent CC transactions (small one USD %d followed by large USD %d) performed using credit card: %s";
   public final transient DefaultOutputPort<String> ccAlertOutputPort = new DefaultOutputPort<String>();
   /*
    public final transient DefaultOutputPort<Map<String, Object>> ccUserAlertOutputPort = new DefaultOutputPort<Map<String, Object>>();
@@ -82,8 +89,7 @@ public class CreditCardAmountSamplerOperator extends BaseOperator
       if (ccAmount < currentSmallValue) {
         cardInfo.lowAmount.setValue(ccAmount);
         cardInfo.time = key.time;
-      }
-      else if (ccAmount > (currentSmallValue + threshold)) {
+      } else if (ccAmount > (currentSmallValue + threshold)) {
         // If the transaction lowAmount is > 70% of the min. lowAmount, send an alert.
 
         CreditCardAlertData data = new CreditCardAlertData();
@@ -113,8 +119,7 @@ public class CreditCardAmountSamplerOperator extends BaseOperator
         // alert not resetting the low value from a user generated transaction
         //txMap.remove(fullCcNum);
       }
-    }
-    else {
+    } else {
       cardInfo = new CreditCardInfo();
       cardInfo.lowAmount.setValue(ccAmount);
       cardInfo.time = key.time;
@@ -123,7 +128,7 @@ public class CreditCardAmountSamplerOperator extends BaseOperator
   }
 
   public transient DefaultInputPort<KeyValPair<MerchantKey, CreditCardData>> inputPort =
-          new DefaultInputPort<KeyValPair<MerchantKey, CreditCardData>>()
+      new DefaultInputPort<KeyValPair<MerchantKey, CreditCardData>>()
   {
     //
     // This function checks if a CC entry exists.
@@ -147,8 +152,7 @@ public class CreditCardAmountSamplerOperator extends BaseOperator
     for (CreditCardAlertData data : alerts) {
       try {
         ccAlertOutputPort.emit(JsonUtils.toJson(data));
-      }
-      catch (IOException e) {
+      } catch (IOException e) {
         logger.warn("Exception while converting object to JSON", e);
       }
     }
@@ -192,8 +196,7 @@ public class CreditCardAmountSamplerOperator extends BaseOperator
     try {
       String str = mapper.writeValueAsString(output);
       logger.debug("Alert generated: " + str + " userGenerated: " + data.userGenerated);
-    }
-    catch (Exception exc) {
+    } catch (Exception exc) {
       //ignore
     }
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/MerchantTransaction.java
----------------------------------------------------------------------
diff --git a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/MerchantTransaction.java b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/MerchantTransaction.java
index 7347790..75e279c 100644
--- a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/MerchantTransaction.java
+++ b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/MerchantTransaction.java
@@ -30,12 +30,12 @@ public class MerchantTransaction implements Serializable
   public enum MerchantType
   {
     UNDEFINED, BRICK_AND_MORTAR, INTERNET
-  };
+  }
 
   public enum TransactionType
   {
     UNDEFINED, POS
-  };
+  }
 
   public String ccNum;
   public String bankIdNum;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/MerchantTransactionBucketOperator.java
----------------------------------------------------------------------
diff --git a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/MerchantTransactionBucketOperator.java b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/MerchantTransactionBucketOperator.java
index 67ab2e9..415b7be 100644
--- a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/MerchantTransactionBucketOperator.java
+++ b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/MerchantTransactionBucketOperator.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -18,19 +18,21 @@
  */
 package com.datatorrent.demos.frauddetect;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.lib.util.KeyValPair;
 import java.text.DecimalFormat;
+import java.util.HashMap;
+import java.util.Map;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.HashMap;
-import java.util.Map;
 import org.apache.commons.lang.mutable.MutableDouble;
 import org.apache.commons.lang.mutable.MutableLong;
 
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
+import com.datatorrent.lib.util.KeyValPair;
+
 /**
  * A bucket-like operator to accept merchant transaction object and dissipate the
  * transaction amount to the further downstream operator for calculating min, max and std-deviation.
@@ -45,13 +47,13 @@ public class MerchantTransactionBucketOperator extends BaseOperator
           new DefaultOutputPort<KeyValPair<MerchantKey, String>>();
   */
   public final transient DefaultOutputPort<KeyValPair<KeyValPair<MerchantKey, String>, Integer>> binCountOutputPort =
-          new DefaultOutputPort<KeyValPair<KeyValPair<MerchantKey, String>, Integer>>();
+      new DefaultOutputPort<KeyValPair<KeyValPair<MerchantKey, String>, Integer>>();
   public final transient DefaultOutputPort<KeyValPair<MerchantKey, Long>> txOutputPort =
-          new DefaultOutputPort<KeyValPair<MerchantKey, Long>>();
+      new DefaultOutputPort<KeyValPair<MerchantKey, Long>>();
   public final transient DefaultOutputPort<KeyValPair<MerchantKey, CreditCardData>> ccAlertOutputPort =
-          new DefaultOutputPort<KeyValPair<MerchantKey, CreditCardData>>();
+      new DefaultOutputPort<KeyValPair<MerchantKey, CreditCardData>>();
   public final transient DefaultOutputPort<Map<String, Object>> summaryTxnOutputPort =
-          new DefaultOutputPort<Map<String, Object>>();
+      new DefaultOutputPort<Map<String, Object>>();
   private MutableLong totalTxns = new MutableLong(0);
   private MutableLong txnsInLastSecond = new MutableLong(0);
   private MutableDouble amtInLastSecond = new MutableDouble(0);
@@ -75,26 +77,26 @@ public class MerchantTransactionBucketOperator extends BaseOperator
 
   };
 
-  public void endWindow() {
-      Map<String, Object> summary = new HashMap<String, Object>();
-      double avg;
-      if (txnsInLastSecond.longValue() == 0) {
-          avg = 0;
-      } else {
-          avg = amtInLastSecond.doubleValue() / txnsInLastSecond.longValue();
-      }
-      summary.put("totalTxns", totalTxns);
-      summary.put("txnsInLastSecond", txnsInLastSecond);
-      summary.put("amtInLastSecond", amtFormatter.format(amtInLastSecond));
-      summary.put("avgAmtInLastSecond", amtFormatter.format(avg));
-      summaryTxnOutputPort.emit(summary);
-      txnsInLastSecond.setValue(0);
-      amtInLastSecond.setValue(0);
+  public void endWindow()
+  {
+    Map<String, Object> summary = new HashMap<String, Object>();
+    double avg;
+    if (txnsInLastSecond.longValue() == 0) {
+      avg = 0;
+    } else {
+      avg = amtInLastSecond.doubleValue() / txnsInLastSecond.longValue();
+    }
+    summary.put("totalTxns", totalTxns);
+    summary.put("txnsInLastSecond", txnsInLastSecond);
+    summary.put("amtInLastSecond", amtFormatter.format(amtInLastSecond));
+    summary.put("avgAmtInLastSecond", amtFormatter.format(avg));
+    summaryTxnOutputPort.emit(summary);
+    txnsInLastSecond.setValue(0);
+    amtInLastSecond.setValue(0);
   }
 
   private void processTuple(MerchantTransaction tuple)
   {
-    //emitBankIdNumTuple(tuple, binOutputPort);
     emitBankIdNumTuple(tuple, binCountOutputPort);
     emitMerchantKeyTuple(tuple, txOutputPort);
     emitCreditCardKeyTuple(tuple, ccAlertOutputPort);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/MerchantTransactionGenerator.java
----------------------------------------------------------------------
diff --git a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/MerchantTransactionGenerator.java b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/MerchantTransactionGenerator.java
index 5f9b7ee..49b61aa 100644
--- a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/MerchantTransactionGenerator.java
+++ b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/MerchantTransactionGenerator.java
@@ -19,12 +19,16 @@
 package com.datatorrent.demos.frauddetect;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.datatorrent.api.*;
+import com.datatorrent.api.Context;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.InputOperator;
 
 import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.demos.frauddetect.util.JsonUtils;
@@ -37,8 +41,8 @@ import com.datatorrent.demos.frauddetect.util.JsonUtils;
 public class MerchantTransactionGenerator extends BaseOperator implements InputOperator
 {
   private final Random randomNum = new Random();
-  public static final int zipCodes[] = {94086, 94087, 94088, 94089, 94090, 94091, 94092, 94093};
-  public static final String merchantIds[] = {"Wal-Mart", "Target", "Amazon", "Apple", "Sears", "Macys", "JCPenny", "Levis"};
+  public static final int[] zipCodes = {94086, 94087, 94088, 94089, 94090, 94091, 94092, 94093};
+  public static final String[] merchantIds = {"Wal-Mart", "Target", "Amazon", "Apple", "Sears", "Macys", "JCPenny", "Levis"};
 //    public static final String bankIdNums[] = { "1111 1111 1111", "2222 2222 2222", "3333 3333 3333", "4444 4444 4444", "5555 5555 5555", "6666 6666 6666", "7777 7777 7777", "8888 8888 8888"};
 //    public static final String ccNums[] = { "0001", "0002", "0003", "0004", "0005", "0006", "0007", "0008"};
 //    public static final String bankIdNums[] = { "1111 1111 1111", "2222 2222 2222", "3333 3333 3333", "4444 4444 4444"};
@@ -67,9 +71,9 @@ public class MerchantTransactionGenerator extends BaseOperator implements InputO
   }
 
   public transient DefaultOutputPort<MerchantTransaction> txOutputPort =
-          new DefaultOutputPort<MerchantTransaction>();
+      new DefaultOutputPort<MerchantTransaction>();
   public transient DefaultOutputPort<String> txDataOutputPort =
-          new DefaultOutputPort<String>();
+      new DefaultOutputPort<String>();
 
   @Override
   public void emitTuples()
@@ -127,8 +131,7 @@ public class MerchantTransactionGenerator extends BaseOperator implements InputO
     for (MerchantTransaction txData : txList) {
       try {
         txDataOutputPort.emit(JsonUtils.toJson(txData));
-      }
-      catch (IOException e) {
+      } catch (IOException e) {
         logger.warn("Exception while converting object to JSON", e);
       }
     }
@@ -136,8 +139,7 @@ public class MerchantTransactionGenerator extends BaseOperator implements InputO
 
     try {
       Thread.sleep(100);
-    }
-    catch (InterruptedException e) {
+    } catch (InterruptedException e) {
       e.printStackTrace();
     }
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/MerchantTransactionInputHandler.java
----------------------------------------------------------------------
diff --git a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/MerchantTransactionInputHandler.java b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/MerchantTransactionInputHandler.java
index 0af8836..cdc829d 100644
--- a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/MerchantTransactionInputHandler.java
+++ b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/MerchantTransactionInputHandler.java
@@ -18,13 +18,14 @@
  */
 package com.datatorrent.demos.frauddetect;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.DefaultOutputPort;
+import java.util.Map;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.Map;
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * Common utility class that can be used by all other operators to handle user input
@@ -41,7 +42,7 @@ public class MerchantTransactionInputHandler extends BaseOperator
   public static final String KEY_ZIP_CODE = "zipCode";
   public static final String KEY_AMOUNT = "amount";
   public transient DefaultOutputPort<MerchantTransaction> txOutputPort =
-          new DefaultOutputPort<MerchantTransaction>();
+      new DefaultOutputPort<MerchantTransaction>();
   public transient DefaultInputPort<Map<String, String>> userTxInputPort = new DefaultInputPort<Map<String, String>>()
   {
     @Override
@@ -49,8 +50,7 @@ public class MerchantTransactionInputHandler extends BaseOperator
     {
       try {
         txOutputPort.emit(processInput(tuple));
-      }
-      catch (Exception exc) {
+      } catch (Exception exc) {
         logger.error("Exception while handling the input", exc);
       }
     }
@@ -86,8 +86,7 @@ public class MerchantTransactionInputHandler extends BaseOperator
       }
     }
 
-    if (bankIdNum == null || ccNum == null || merchantId == null
-            || terminalId == null || zipCode == null || amount == null) {
+    if (bankIdNum == null || ccNum == null || merchantId == null || terminalId == null || zipCode == null || amount == null) {
       throw new IllegalArgumentException("Missing required input!");
     }
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/SlidingWindowSumKeyVal.java
----------------------------------------------------------------------
diff --git a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/SlidingWindowSumKeyVal.java b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/SlidingWindowSumKeyVal.java
index 056967b..2701c14 100644
--- a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/SlidingWindowSumKeyVal.java
+++ b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/SlidingWindowSumKeyVal.java
@@ -20,11 +20,12 @@ package com.datatorrent.demos.frauddetect;
 
 import java.util.ArrayList;
 
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
+
 import com.datatorrent.lib.multiwindow.AbstractSlidingWindowKeyVal;
 import com.datatorrent.lib.util.KeyValPair;
 
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
 
 /**
  * Sliding window sum operator
@@ -34,27 +35,27 @@ import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
 public class SlidingWindowSumKeyVal<K, V extends Number> extends AbstractSlidingWindowKeyVal<K, V, SlidingWindowSumObject>
 {
 
-  	/**
-	 * Output port to emit simple moving average (SMA) of last N window as Double.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<KeyValPair<K, Double>> doubleSum = new DefaultOutputPort<KeyValPair<K, Double>>();
-	/**
-	 * Output port to emit simple moving average (SMA) of last N window as Float.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<KeyValPair<K, Float>> floatSum = new DefaultOutputPort<KeyValPair<K, Float>>();
-	/**
-	 * Output port to emit simple moving average (SMA) of last N window as Long.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<KeyValPair<K, Long>> longSum = new DefaultOutputPort<KeyValPair<K, Long>>();
-	/**
-	 * Output port to emit simple moving average (SMA) of last N window as
-	 * Integer.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<KeyValPair<K, Integer>> integerSum = new DefaultOutputPort<KeyValPair<K, Integer>>();
+  /**
+   * Output port to emit simple moving average (SMA) of last N window as Double.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<KeyValPair<K, Double>> doubleSum = new DefaultOutputPort<KeyValPair<K, Double>>();
+  /**
+   * Output port to emit simple moving average (SMA) of last N window as Float.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<KeyValPair<K, Float>> floatSum = new DefaultOutputPort<KeyValPair<K, Float>>();
+  /**
+   * Output port to emit simple moving average (SMA) of last N window as Long.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<KeyValPair<K, Long>> longSum = new DefaultOutputPort<KeyValPair<K, Long>>();
+  /**
+   * Output port to emit simple moving average (SMA) of last N window as
+   * Integer.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<KeyValPair<K, Integer>> integerSum = new DefaultOutputPort<KeyValPair<K, Integer>>();
 
 
   @Override
@@ -69,7 +70,6 @@ public class SlidingWindowSumKeyVal<K, V extends Number> extends AbstractSliding
       }
       buffer.put(key, stateList);
     }
-
     SlidingWindowSumObject state = stateList.get(currentstate);
     state.add(tuple.getValue());
   }
@@ -78,7 +78,7 @@ public class SlidingWindowSumKeyVal<K, V extends Number> extends AbstractSliding
   public void emitTuple(K key, ArrayList<SlidingWindowSumObject> obj)
   {
     double sum = 0;
-    for (int i=0; i < obj.size(); ++i) {
+    for (int i = 0; i < obj.size(); ++i) {
       SlidingWindowSumObject state = obj.get(i);
       sum += state.getSum();
     }
@@ -86,13 +86,13 @@ public class SlidingWindowSumKeyVal<K, V extends Number> extends AbstractSliding
       doubleSum.emit(new KeyValPair<K, Double>(key, sum));
     }
     if (floatSum.isConnected()) {
-      floatSum.emit(new KeyValPair<K, Float>(key, (float) sum));
+      floatSum.emit(new KeyValPair<K, Float>(key, (float)sum));
     }
     if (longSum.isConnected()) {
-      longSum.emit(new KeyValPair<K, Long>(key, (long) sum));
+      longSum.emit(new KeyValPair<K, Long>(key, (long)sum));
     }
     if (integerSum.isConnected()) {
-      integerSum.emit(new KeyValPair<K, Integer>(key, (int) sum));
+      integerSum.emit(new KeyValPair<K, Integer>(key, (int)sum));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/SlidingWindowSumObject.java
----------------------------------------------------------------------
diff --git a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/SlidingWindowSumObject.java b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/SlidingWindowSumObject.java
index 2075165..3fefb66 100644
--- a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/SlidingWindowSumObject.java
+++ b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/SlidingWindowSumObject.java
@@ -33,12 +33,14 @@ public class SlidingWindowSumObject extends SimpleMovingAverageObject
 
   MutableDouble sum = new MutableDouble(0);
 
-  public void add(Number n) {
+  public void add(Number n)
+  {
     sum.add(n);
   }
 
   @Override
-  public double getSum() {
+  public double getSum()
+  {
     return sum.doubleValue();
   }
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/TransactionStatsAggregator.java
----------------------------------------------------------------------
diff --git a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/TransactionStatsAggregator.java b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/TransactionStatsAggregator.java
index 4a90618..e226af0 100644
--- a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/TransactionStatsAggregator.java
+++ b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/TransactionStatsAggregator.java
@@ -18,18 +18,19 @@
  */
 package com.datatorrent.demos.frauddetect;
 
-import com.datatorrent.common.util.BaseOperator;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
+import com.datatorrent.demos.frauddetect.util.JsonUtils;
 import com.datatorrent.lib.util.HighLow;
 import com.datatorrent.lib.util.KeyValPair;
-import com.datatorrent.demos.frauddetect.util.JsonUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
 
 /**
  * Operator to aggregate the min, max, sma, std-dev and variance for the given key.
@@ -39,10 +40,10 @@ import java.util.Map;
 public class TransactionStatsAggregator extends BaseOperator
 {
   public Map<MerchantKey, TransactionStatsData> aggrgateMap =
-          new HashMap<MerchantKey, TransactionStatsData>();
+      new HashMap<MerchantKey, TransactionStatsData>();
   public final transient DefaultOutputPort<String> txDataOutputPort = new DefaultOutputPort<String>();
   public final transient DefaultInputPort<KeyValPair<MerchantKey, HighLow<Long>>> rangeInputPort =
-          new DefaultInputPort<KeyValPair<MerchantKey, HighLow<Long>>>()
+      new DefaultInputPort<KeyValPair<MerchantKey, HighLow<Long>>>()
   {
     @Override
     public void process(KeyValPair<MerchantKey, HighLow<Long>> tuple)
@@ -55,7 +56,7 @@ public class TransactionStatsAggregator extends BaseOperator
 
   };
   public final transient DefaultInputPort<KeyValPair<MerchantKey, Long>> smaInputPort =
-          new DefaultInputPort<KeyValPair<MerchantKey, Long>>()
+      new DefaultInputPort<KeyValPair<MerchantKey, Long>>()
   {
     @Override
     public void process(KeyValPair<MerchantKey, Long> tuple)
@@ -87,8 +88,7 @@ public class TransactionStatsAggregator extends BaseOperator
     for (Map.Entry<MerchantKey, TransactionStatsData> entry : aggrgateMap.entrySet()) {
       try {
         txDataOutputPort.emit(JsonUtils.toJson(entry.getValue()));
-      }
-      catch (IOException e) {
+      } catch (IOException e) {
         logger.warn("Exception while converting object to JSON", e);
       }
     }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/operator/HdfsStringOutputOperator.java
----------------------------------------------------------------------
diff --git a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/operator/HdfsStringOutputOperator.java b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/operator/HdfsStringOutputOperator.java
index 86fa921..4b8f851 100644
--- a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/operator/HdfsStringOutputOperator.java
+++ b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/operator/HdfsStringOutputOperator.java
@@ -18,12 +18,12 @@
  */
 package com.datatorrent.demos.frauddetect.operator;
 
+import java.io.File;
+
 import com.datatorrent.api.Context.DAGContext;
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.lib.io.fs.AbstractFileOutputOperator;
 
-import java.io.File;
-
 /**
  * Adapter for writing Strings to HDFS
  * <p>
@@ -65,8 +65,7 @@ public class HdfsStringOutputOperator extends AbstractFileOutputOperator<String>
   }
 
   @Override
-  public String getPartFileName(String fileName,
-                                int part)
+  public String getPartFileName(String fileName, int part)
   {
     return fileName + part;
   }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/operator/MongoDBOutputOperator.java
----------------------------------------------------------------------
diff --git a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/operator/MongoDBOutputOperator.java b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/operator/MongoDBOutputOperator.java
index a4a2775..0171c00 100644
--- a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/operator/MongoDBOutputOperator.java
+++ b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/operator/MongoDBOutputOperator.java
@@ -18,146 +18,171 @@
  */
 package com.datatorrent.demos.frauddetect.operator;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.Context;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.List;
 
 import javax.validation.constraints.NotNull;
 
-import com.datatorrent.api.DefaultInputPort;
-import com.mongodb.*;
-import com.mongodb.util.JSON;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.net.UnknownHostException;
-import java.util.ArrayList;
-import java.util.List;
+import com.mongodb.DB;
+import com.mongodb.DBCollection;
+import com.mongodb.DBObject;
+import com.mongodb.MongoClient;
+import com.mongodb.WriteConcern;
+import com.mongodb.WriteResult;
+import com.mongodb.util.JSON;
+
+import com.datatorrent.api.Context;
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.common.util.BaseOperator;
+
 
 /**
  * Operator to write data into MongoDB
  *
  * @since 0.9.0
  */
-public class MongoDBOutputOperator extends BaseOperator {
-
-    @NotNull
-    protected String hostName;
-    @NotNull
-    protected String dataBase;
-    @NotNull
-    protected String collection;
-
-    protected WriteConcern writeConcern = WriteConcern.ACKNOWLEDGED;
-
-    protected String userName;
-    protected String passWord;
-
-    protected transient MongoClient mongoClient;
-    protected transient DB db;
-    protected transient DBCollection dbCollection;
-
-    protected List<DBObject> dataList = new ArrayList<DBObject>();
-
-    public MongoDBOutputOperator() {
-    }
-
-    /**
-     * Take the JSON formatted string and convert it to DBObject
-     */
-    public transient final DefaultInputPort<String> inputPort = new DefaultInputPort<String>() {
-        @Override
-        public void process(String tuple) {
-            dataList.add((DBObject)JSON.parse(tuple));
-        }
-    };
-
-    @Override
-    public void setup(Context.OperatorContext context) {
-        super.setup(context);
-        try {
-            mongoClient = new MongoClient(hostName);
-            db = mongoClient.getDB(dataBase);
-            if (userName != null && passWord != null) {
-                if (!db.authenticate(userName, passWord.toCharArray())) {
-                    throw new IllegalArgumentException("MongoDB authentication failed. Illegal username and password for MongoDB!!");
-                }
-            }
-            dbCollection = db.getCollection(collection);
-        }
-        catch (UnknownHostException ex) {
-            logger.debug(ex.toString());
-        }
-    }
-
-    @Override
-    public void beginWindow(long windowId) {
-        // nothing
-    }
-
-    @Override
-    public void endWindow() {
-        logger.debug("mongo datalist size: " + dataList.size());
-        if (dataList.size() > 0) {
-            WriteResult result = dbCollection.insert(dataList, writeConcern);
-            logger.debug("Result for MongoDB insert: " + result);
-            dataList.clear();
-        }
-    }
-
+public class MongoDBOutputOperator extends BaseOperator
+{
+  @NotNull
+  protected String hostName;
+  @NotNull
+  protected String dataBase;
+  @NotNull
+  protected String collection;
+
+  protected WriteConcern writeConcern = WriteConcern.ACKNOWLEDGED;
+
+  protected String userName;
+  protected String passWord;
+
+  protected transient MongoClient mongoClient;
+  protected transient DB db;
+  protected transient DBCollection dbCollection;
+
+  protected List<DBObject> dataList = new ArrayList<DBObject>();
+
+  public MongoDBOutputOperator()
+  {
+  }
+
+  /**
+   * Take the JSON formatted string and convert it to DBObject
+   */
+  public final transient DefaultInputPort<String> inputPort = new DefaultInputPort<String>()
+  {
     @Override
-    public void teardown() {
-        if (mongoClient != null) {
-            mongoClient.close();
+    public void process(String tuple)
+    {
+      dataList.add((DBObject)JSON.parse(tuple));
+    }
+  };
+
+  @Override
+  public void setup(Context.OperatorContext context)
+  {
+    super.setup(context);
+    try {
+      mongoClient = new MongoClient(hostName);
+      db = mongoClient.getDB(dataBase);
+      if (userName != null && passWord != null) {
+        if (!db.authenticate(userName, passWord.toCharArray())) {
+          throw new IllegalArgumentException("MongoDB authentication failed. Illegal username and password for MongoDB!!");
         }
-    }
-
-    public String getHostName() {
-        return hostName;
-    }
-
-    public void setHostName(String hostName) {
-        this.hostName = hostName;
-    }
-
-    public String getDataBase() {
-        return dataBase;
-    }
-
-    public void setDataBase(String dataBase) {
-        this.dataBase = dataBase;
-    }
-
-    public String getCollection() {
-        return collection;
-    }
-
-    public void setCollection(String collection) {
-        this.collection = collection;
-    }
-
-    public String getUserName() {
-        return userName;
-    }
-
-    public void setUserName(String userName) {
-        this.userName = userName;
-    }
-
-    public String getPassWord() {
-        return passWord;
-    }
-
-    public void setPassWord(String passWord) {
-        this.passWord = passWord;
-    }
-
-    public WriteConcern getWriteConcern() {
-        return writeConcern;
-    }
-
-    public void setWriteConcern(WriteConcern writeConcern) {
-        this.writeConcern = writeConcern;
-    }
-
-    private static final Logger logger = LoggerFactory.getLogger(MongoDBOutputOperator.class);
+      }
+      dbCollection = db.getCollection(collection);
+    } catch (UnknownHostException ex) {
+      logger.debug(ex.toString());
+    }
+  }
+
+  @Override
+  public void beginWindow(long windowId)
+  {
+    // nothing
+  }
+
+  @Override
+  public void endWindow()
+  {
+    logger.debug("mongo datalist size: " + dataList.size());
+    if (dataList.size() > 0) {
+      WriteResult result = dbCollection.insert(dataList, writeConcern);
+      logger.debug("Result for MongoDB insert: " + result);
+      dataList.clear();
+    }
+  }
+
+  @Override
+  public void teardown()
+  {
+    if (mongoClient != null) {
+      mongoClient.close();
+    }
+  }
+
+  public String getHostName()
+  {
+    return hostName;
+  }
+
+  public void setHostName(String hostName)
+  {
+    this.hostName = hostName;
+  }
+
+  public String getDataBase()
+  {
+    return dataBase;
+  }
+
+  public void setDataBase(String dataBase)
+  {
+    this.dataBase = dataBase;
+  }
+
+  public String getCollection()
+  {
+    return collection;
+  }
+
+  public void setCollection(String collection)
+  {
+    this.collection = collection;
+  }
+
+  public String getUserName()
+  {
+    return userName;
+  }
+
+  public void setUserName(String userName)
+  {
+    this.userName = userName;
+  }
+
+  public String getPassWord()
+  {
+    return passWord;
+  }
+
+  public void setPassWord(String passWord)
+  {
+    this.passWord = passWord;
+  }
+
+  public WriteConcern getWriteConcern()
+  {
+    return writeConcern;
+  }
+
+  public void setWriteConcern(WriteConcern writeConcern)
+  {
+    this.writeConcern = writeConcern;
+  }
+
+  private static final Logger logger = LoggerFactory.getLogger(MongoDBOutputOperator.class);
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/util/JsonUtils.java
----------------------------------------------------------------------
diff --git a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/util/JsonUtils.java b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/util/JsonUtils.java
index 1eb87ed..60c200f 100644
--- a/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/util/JsonUtils.java
+++ b/demos/frauddetect/src/main/java/com/datatorrent/demos/frauddetect/util/JsonUtils.java
@@ -18,20 +18,21 @@
  */
 package com.datatorrent.demos.frauddetect.util;
 
-import org.codehaus.jackson.map.ObjectMapper;
-
 import java.io.IOException;
 
+import org.codehaus.jackson.map.ObjectMapper;
+
 /**
  * Utility class to deal with JSON and Object
  *
  * @since 0.9.0
  */
-public class JsonUtils {
-
-    private static final ObjectMapper mapper = new ObjectMapper();
+public class JsonUtils
+{
+  private static final ObjectMapper mapper = new ObjectMapper();
 
-    public static String toJson(Object obj) throws IOException {
-        return mapper.writeValueAsString(obj);
-    }
+  public static String toJson(Object obj) throws IOException
+  {
+    return mapper.writeValueAsString(obj);
+  }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/frauddetect/src/test/java/com/datatorrent/demos/frauddetect/FrauddetectApplicationTest.java
----------------------------------------------------------------------
diff --git a/demos/frauddetect/src/test/java/com/datatorrent/demos/frauddetect/FrauddetectApplicationTest.java b/demos/frauddetect/src/test/java/com/datatorrent/demos/frauddetect/FrauddetectApplicationTest.java
index b54ee83..ef1f371 100644
--- a/demos/frauddetect/src/test/java/com/datatorrent/demos/frauddetect/FrauddetectApplicationTest.java
+++ b/demos/frauddetect/src/test/java/com/datatorrent/demos/frauddetect/FrauddetectApplicationTest.java
@@ -18,33 +18,33 @@
  */
 package com.datatorrent.demos.frauddetect;
 
-import com.datatorrent.api.LocalMode;
-import org.apache.hadoop.conf.Configuration;
 import org.junit.Test;
+import org.apache.hadoop.conf.Configuration;
+import com.datatorrent.api.LocalMode;
 
 /**
  * Fraud detection application test
  */
-public class FrauddetectApplicationTest {
+public class FrauddetectApplicationTest
+{
 
-    public FrauddetectApplicationTest() {
-    }
+  public FrauddetectApplicationTest()
+  {
+  }
 
-    @Test
-    public void testApplication() throws Exception {
-      try
-      {
-        Application application = new Application();
-        Configuration conf = new Configuration(false);
-        conf.addResource("dt-site-frauddetect.xml");
-        LocalMode lma = LocalMode.newInstance();
-        lma.prepareDAG(application, conf);
-        lma.getController().run(120000);
-      }
-      catch(Exception e)
-      {
-        e.printStackTrace();
-      }
+  @Test
+  public void testApplication() throws Exception
+  {
+    try {
+      Application application = new Application();
+      Configuration conf = new Configuration(false);
+      conf.addResource("dt-site-frauddetect.xml");
+      LocalMode lma = LocalMode.newInstance();
+      lma.prepareDAG(application, conf);
+      lma.getController().run(120000);
+    } catch (Exception e) {
+      e.printStackTrace();
     }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/7d9386d2/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/MinimalWordCount.java
----------------------------------------------------------------------
diff --git a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/MinimalWordCount.java b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/MinimalWordCount.java
index 671cc72..21afc5b 100644
--- a/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/MinimalWordCount.java
+++ b/demos/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/MinimalWordCount.java
@@ -49,19 +49,19 @@ public class MinimalWordCount implements StreamingApplication
   {
     static Map<String, Long> result;
     private static boolean done = false;
-  
+
     public static boolean isDone()
     {
       return done;
     }
-  
+
     @Override
     public void setup(Context.OperatorContext context)
     {
       done = false;
       result = new HashMap<>();
     }
-    
+
     public final transient DefaultInputPort<KeyValPair<String, Long>> input = new DefaultInputPort<KeyValPair<String, Long>>()
     {
       @Override
@@ -74,7 +74,7 @@ public class MinimalWordCount implements StreamingApplication
       }
     };
   }
-  
+
   /**
    * Populate the dag using High-Level API.
    * @param dag
@@ -93,7 +93,7 @@ public class MinimalWordCount implements StreamingApplication
           public Iterable<String> f(String input)
           {
             return Arrays.asList(input.split("[^a-zA-Z']+"));
-          
+
           }
         }, name("ExtractWords"))
         // Apply windowing to the stream for counting, in this case, the window option is global window.